lang
stringclasses 1
value | license
stringclasses 13
values | stderr
stringlengths 0
350
| commit
stringlengths 40
40
| returncode
int64 0
128
| repos
stringlengths 7
45.1k
| new_contents
stringlengths 0
1.87M
| new_file
stringlengths 6
292
| old_contents
stringlengths 0
1.87M
| message
stringlengths 6
9.26k
| old_file
stringlengths 6
292
| subject
stringlengths 0
4.45k
|
|---|---|---|---|---|---|---|---|---|---|---|---|
Java
|
apache-2.0
|
807583ab512e4cbf3a4370f7ec1aaad51f0f19f5
| 0
|
gbif/gbif-api
|
/*
* Copyright 2013 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.api.vocabulary;
import org.gbif.api.util.VocabularyUtils;
import java.util.List;
import javax.annotation.Nullable;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
/**
* Enumeration for all possible identifier types.
*/
public enum IdentifierType {
// A plain web address, e.g. a dataset landing page.
URL,
/**
* Reference controlled by a separate system, used for example by DOI.
* {http://en.wikipedia.org/wiki/Handle_(computing)}
*/
LSID,
// NOTE(review): presumably a Handle (handle.net) identifier — name kept as-is for compatibility.
HANDLER,
DOI,
UUID,
FTP,
URI,
// Fallback when the type is absent or cannot be determined.
UNKNOWN,
/**
* Indicates the identifier originated from an auto_increment column in the portal.data_provider or
* portal.data_resource table respectively.
*/
GBIF_PORTAL,
/**
* Identifies the node (e.g: 'DK' for Denmark, 'sp2000' for Species 2000).
*/
GBIF_NODE,
/**
* Participant identifier from the GBIF Directory.
*/
GBIF_PARTICIPANT;
// TODO: Check if this is used, it didn't exist in the new Registry2 API, but I preserved it from the old vocabulary
// Immutable list of all constants, in declaration order.
public static final List<IdentifierType> TYPES;
static {
TYPES = ImmutableList.copyOf(IdentifierType.values());
}
/**
* Looks up an IdentifierType by its name via {@code VocabularyUtils.lookupEnum}.
*
* @return the matching IdentifierType or null
*/
public static IdentifierType fromString(String identifierType) {
return (IdentifierType) VocabularyUtils.lookupEnum(identifierType, IdentifierType.class);
}
/**
* Tries to infer the identifier type from a given identifier.
* Most identifiers have a URI protocol prefix or a specific structure that
* allows the guess.
*
* @return the inferred identifier type, or UNKNOWN if the identifier is null, empty or can't be inferred
*/
public static IdentifierType inferFrom(@Nullable String identifier) {
// Null-safe, trimmed, lower-cased copy used for all prefix comparisons below.
String lcIdentifier = Strings.nullToEmpty(identifier).trim().toLowerCase();
if (lcIdentifier.isEmpty()) {
return UNKNOWN;
}
// GBIF-issued DOI prefixes, declared on org.gbif.api.model.common.DOI.
if (lcIdentifier.startsWith(org.gbif.api.model.common.DOI.GBIF_PREFIX)
|| lcIdentifier.startsWith(org.gbif.api.model.common.DOI.TEST_PREFIX)) {
return DOI;
}
// Common DOI spellings: bare "10.", doi:/urn:doi: schemes, and (dx.)doi.org resolver URLs.
// Checked before the generic http(s) test so resolver URLs classify as DOI, not URL.
if (lcIdentifier.startsWith("10.")
|| lcIdentifier.startsWith("doi:10.")
|| lcIdentifier.startsWith("urn:doi:10.")
|| lcIdentifier.startsWith("http://dx.doi.org/10.")
|| lcIdentifier.startsWith("https://dx.doi.org/10.")
|| lcIdentifier.startsWith("http://doi.org/10.")
|| lcIdentifier.startsWith("https://doi.org/10.")) {
return DOI;
}
if (lcIdentifier.startsWith("http:")
|| lcIdentifier.startsWith("https:")
|| lcIdentifier.startsWith("www.")) {
return URL;
}
if (lcIdentifier.startsWith("ftp:")) {
return FTP;
}
if (lcIdentifier.startsWith("urn:lsid:") || lcIdentifier.startsWith("lsid:")) {
return LSID;
}
if (lcIdentifier.startsWith("urn:uuid:") || lcIdentifier.startsWith("uuid:")) {
return UUID;
}
// Last resort: accept anything java.util.UUID can parse as a bare UUID.
try {
java.util.UUID.fromString(lcIdentifier);
return UUID;
} catch (IllegalArgumentException ignored) {
// We're just trying to convert a String to anything readable. Apparently the UUID approach failed.
}
return UNKNOWN;
}
}
|
src/main/java/org/gbif/api/vocabulary/IdentifierType.java
|
/*
* Copyright 2013 Global Biodiversity Information Facility (GBIF)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gbif.api.vocabulary;
import org.gbif.api.util.VocabularyUtils;
import java.util.List;
import javax.annotation.Nullable;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
/**
* Enumeration for all possible identifier types.
*/
public enum IdentifierType {
// A plain web address, e.g. a dataset landing page.
URL,
/**
* Reference controlled by a separate system, used for example by DOI.
* {http://en.wikipedia.org/wiki/Handle_(computing)}
*/
LSID,
// NOTE(review): presumably a Handle (handle.net) identifier — name kept as-is for compatibility.
HANDLER,
DOI,
UUID,
FTP,
URI,
// Fallback when the type is absent or cannot be determined.
UNKNOWN,
/**
* Indicates the identifier originated from an auto_increment column in the portal.data_provider or
* portal.data_resource table respectively.
*/
GBIF_PORTAL,
/**
* Identifies the node (e.g: 'DK' for Denmark, 'sp2000' for Species 2000).
*/
GBIF_NODE,
/**
* Participant identifier from the GBIF Directory.
*/
GBIF_PARTICIPANT;
// TODO: Check if this is used, it didn't exist in the new Registry2 API, but I preserved it from the old vocabulary
// Immutable list of all constants, in declaration order.
public static final List<IdentifierType> TYPES;
static {
TYPES = ImmutableList.copyOf(IdentifierType.values());
}
/**
* Looks up an IdentifierType by its name via {@code VocabularyUtils.lookupEnum}.
*
* @return the matching IdentifierType or null
*/
public static IdentifierType fromString(String identifierType) {
return (IdentifierType) VocabularyUtils.lookupEnum(identifierType, IdentifierType.class);
}
/**
* Tries to infer the identifier type from a given identifier.
* Most identifiers have a URI protocol prefix or a specific structure that
* allows the guess.
*
* @return the inferred identifier type, or UNKNOWN if the identifier is null, empty or can't be inferred
*/
public static IdentifierType inferFrom(@Nullable String identifier) {
// Null-safe, trimmed, lower-cased copy used for all prefix comparisons below.
String lcIdentifier = Strings.nullToEmpty(identifier).trim().toLowerCase();
if (lcIdentifier.isEmpty()) {
return UNKNOWN;
}
// GBIF-issued DOI prefixes, declared on org.gbif.api.model.common.DOI.
if (lcIdentifier.startsWith(org.gbif.api.model.common.DOI.GBIF_PREFIX)
|| lcIdentifier.startsWith(org.gbif.api.model.common.DOI.TEST_PREFIX)) {
return DOI;
}
// DOI scheme spellings and (dx.)doi.org resolver URLs, checked before the generic
// http(s) test so resolver URLs classify as DOI, not URL.
// NOTE(review): bare "10.xxxx/..." DOIs are not matched by this list — verify intended.
if (lcIdentifier.startsWith("doi:10")
|| lcIdentifier.startsWith("urn:doi:")
|| lcIdentifier.startsWith("http://dx.doi.org/10.")
|| lcIdentifier.startsWith("https://dx.doi.org/10.")
|| lcIdentifier.startsWith("http://doi.org/10.")
|| lcIdentifier.startsWith("https://doi.org/10.")) {
return DOI;
}
if (lcIdentifier.startsWith("http:")
|| lcIdentifier.startsWith("https:")
|| lcIdentifier.startsWith("www.")) {
return URL;
}
if (lcIdentifier.startsWith("ftp:")) {
return FTP;
}
if (lcIdentifier.startsWith("urn:lsid:") || lcIdentifier.startsWith("lsid:")) {
return LSID;
}
if (lcIdentifier.startsWith("urn:uuid:") || lcIdentifier.startsWith("uuid:")) {
return UUID;
}
// Last resort: accept anything java.util.UUID can parse as a bare UUID.
try {
java.util.UUID.fromString(lcIdentifier);
return UUID;
} catch (IllegalArgumentException ignored) {
// We're just trying to convert a String to anything readable. Apparently the UUID approach failed.
}
return UNKNOWN;
}
}
|
Need to detect these DOIs.
|
src/main/java/org/gbif/api/vocabulary/IdentifierType.java
|
Need to detect these DOIs.
|
|
Java
|
apache-2.0
|
16b9b3f337029d4e9586561f9a548e55b92e3ed5
| 0
|
JohnPJenkins/swift-t,JohnPJenkins/swift-t,blue42u/swift-t,JohnPJenkins/swift-t,JohnPJenkins/swift-t,blue42u/swift-t,swift-lang/swift-t,swift-lang/swift-t,blue42u/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,swift-lang/swift-t,swift-lang/swift-t,basheersubei/swift-t,blue42u/swift-t,swift-lang/swift-t,basheersubei/swift-t,basheersubei/swift-t,blue42u/swift-t,basheersubei/swift-t,swift-lang/swift-t,blue42u/swift-t,blue42u/swift-t,swift-lang/swift-t,basheersubei/swift-t,JohnPJenkins/swift-t,basheersubei/swift-t
|
/*
* Copyright 2013 University of Chicago and Argonne National Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package exm.stc.tclbackend;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import exm.stc.common.exceptions.STCRuntimeError;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Var;
import exm.stc.tclbackend.tree.Expression;
import exm.stc.tclbackend.tree.Expression.ExprContext;
import exm.stc.tclbackend.tree.LiteralFloat;
import exm.stc.tclbackend.tree.LiteralInt;
import exm.stc.tclbackend.tree.TclList;
import exm.stc.tclbackend.tree.TclString;
import exm.stc.tclbackend.tree.Value;
/**
 * Static helpers for converting STC intermediate-representation values
 * ({@code Arg}, {@code Var}) into Tcl backend expression trees.
 */
public class TclUtil {
/**
 * Converts an argument to a Tcl expression; null input is an error.
 */
public static Expression argToExpr(Arg in) {
return argToExpr(in, false);
}
/**
 * Converts each argument in order; see {@link #argToExpr(Arg)}.
 */
public static List<Expression> argsToExpr(List<Arg> in) {
List<Expression> res = new ArrayList<Expression>(in.size());
for (Arg a: in) {
res.add(argToExpr(a));
}
return res;
}
/**
 * Converts an argument to the matching Tcl literal or variable reference.
 *
 * @param passThroughNull if true, a null argument yields null; otherwise it
 *        raises STCRuntimeError
 */
public static Expression argToExpr(Arg in, boolean passThroughNull) {
if (in == null) {
if (passThroughNull) {
return null;
} else {
throw new STCRuntimeError("Unexpected null variable in argToExpr");
}
}
switch (in.getKind()) {
case INTVAL:
return new LiteralInt(in.getIntLit());
case BOOLVAL:
// Tcl has no boolean literal type here: booleans become 1/0 integers.
return new LiteralInt(in.getBoolLit() ? 1 : 0);
case STRINGVAL:
return new TclString(in.getStringLit(), true);
case VAR:
return varToExpr(in.getVar());
case FLOATVAL:
return new LiteralFloat(in.getFloatLit());
default:
throw new STCRuntimeError("Unknown oparg type: "
+ in.getKind().toString());
}
}
/**
 * Converts a variable to a Tcl value reference; null input is an error.
 */
public static Value varToExpr(Var v) {
return varToExpr(v, false);
}
/**
 * Converts a variable to a Tcl value reference using its prefixed Tcl name.
 *
 * @param passThroughNull if true, a null variable yields null; otherwise it
 *        raises STCRuntimeError
 */
public static Value varToExpr(Var v, boolean passThroughNull) {
if (v == null) {
if (passThroughNull) {
return null;
} else {
throw new STCRuntimeError("Unexpected null variable in varToExpr");
}
}
Value val = new Value(TclNamer.prefixVar(v.name()));
// Mark values whose runtime representation is a Tcl list so they are
// expanded/escaped correctly downstream.
if (representationIsTclList(v.type())) {
val.setTreatAsList(true);
}
val.setSupportsStringList(supportsStringList(v.type()));
return val;
}
/**
* Whether we can include value of this time in string list, e.g.
* "${x} ${y}"
* @param type
* @return
*/
public static boolean supportsStringList(Type type) {
// Can't escape these types correctly
List<Type> badTypes = Arrays.asList(Types.V_STRING, Types.V_BLOB);
for (Type t: badTypes) {
if (type.assignableTo(t)) {
return false;
}
}
return true;
}
/**
 * Whether the backend representation of this type is a Tcl list
 * (files, local structs/containers, and shared-data handles).
 */
public static boolean representationIsTclList(Type type) {
if (Types.isFileRef(type) || Types.isStructLocal(type) ||
Types.isFileVal(type) || Types.isContainerLocal(type)) {
return true;
} else if (isHandle(type)) {
// treat handles as lists, since they might be a list of id + subscript
// TODO: more selective treatment?
return true;
}
return false;
}
/**
* If it's a handle to shared data
* @param type
* @return
*/
private static boolean isHandle(Type type) {
return Types.isPrimFuture(type) || Types.isStruct(type) ||
Types.isContainer(type) || Types.isPrimUpdateable(type) ||
Types.isRef(type);
}
/**
 * Converts each variable in order; see {@link #varToExpr(Var)}.
 */
public static List<Expression> varsToExpr(List<Var> inputs) {
List<Expression> res = new ArrayList<Expression>(inputs.size());
for (Var in: inputs) {
res.add(varToExpr(in));
}
return res;
}
/**
 * Builds a TclList expression of variable references.
 */
public static TclList tclListOfVariables(List<Var> inputs) {
TclList result = new TclList();
for (Var v: inputs)
result.add(varToExpr(v));
return result;
}
/**
 * Builds a TclList expression of argument expressions.
 */
public static TclList tclListOfArgs(List<Arg> inputs) {
TclList result = new TclList();
for (Arg a: inputs)
result.add(argToExpr(a));
return result;
}
/**
* Try to pack list of expressions into a string that is a valid
* tcl list
* Fallback to list if we don't know how to do escaping correctly
* @param ruleTokens
* @return
*/
public static Expression tclStringAsList(List<Expression> ruleTokens) {
boolean canUseString = true;
for (Expression tok: ruleTokens) {
if (!tok.supportsStringList()) {
canUseString = false;
break;
}
}
if (canUseString) {
return new TclString(ruleTokens, ExprContext.LIST_STRING);
} else {
return new TclList(ruleTokens);
}
}
}
|
code/src/exm/stc/tclbackend/TclUtil.java
|
/*
* Copyright 2013 University of Chicago and Argonne National Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package exm.stc.tclbackend;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import exm.stc.common.exceptions.STCRuntimeError;
import exm.stc.common.lang.Arg;
import exm.stc.common.lang.Types;
import exm.stc.common.lang.Types.Type;
import exm.stc.common.lang.Var;
import exm.stc.tclbackend.tree.Expression;
import exm.stc.tclbackend.tree.Expression.ExprContext;
import exm.stc.tclbackend.tree.LiteralFloat;
import exm.stc.tclbackend.tree.LiteralInt;
import exm.stc.tclbackend.tree.TclList;
import exm.stc.tclbackend.tree.TclString;
import exm.stc.tclbackend.tree.Value;
/**
 * Static helpers for converting STC intermediate-representation values
 * ({@code Arg}, {@code Var}) into Tcl backend expression trees.
 */
public class TclUtil {
/**
 * Converts an argument to a Tcl expression; null input is an error.
 */
public static Expression argToExpr(Arg in) {
return argToExpr(in, false);
}
/**
 * Converts each argument in order; see {@link #argToExpr(Arg)}.
 */
public static List<Expression> argsToExpr(List<Arg> in) {
List<Expression> res = new ArrayList<Expression>(in.size());
for (Arg a: in) {
res.add(argToExpr(a));
}
return res;
}
/**
 * Converts an argument to the matching Tcl literal or variable reference.
 *
 * @param passThroughNull if true, a null argument yields null; otherwise it
 *        raises STCRuntimeError
 */
public static Expression argToExpr(Arg in, boolean passThroughNull) {
if (in == null) {
if (passThroughNull) {
return null;
} else {
throw new STCRuntimeError("Unexpected null variable in argToExpr");
}
}
switch (in.getKind()) {
case INTVAL:
return new LiteralInt(in.getIntLit());
case BOOLVAL:
// Tcl has no boolean literal type here: booleans become 1/0 integers.
return new LiteralInt(in.getBoolLit() ? 1 : 0);
case STRINGVAL:
return new TclString(in.getStringLit(), true);
case VAR:
return varToExpr(in.getVar());
case FLOATVAL:
return new LiteralFloat(in.getFloatLit());
default:
throw new STCRuntimeError("Unknown oparg type: "
+ in.getKind().toString());
}
}
/**
 * Converts a variable to a Tcl value reference; null input is an error.
 */
public static Value varToExpr(Var v) {
return varToExpr(v, false);
}
/**
 * Converts a variable to a Tcl value reference using its prefixed Tcl name.
 *
 * @param passThroughNull if true, a null variable yields null; otherwise it
 *        raises STCRuntimeError
 */
public static Value varToExpr(Var v, boolean passThroughNull) {
if (v == null) {
if (passThroughNull) {
return null;
} else {
throw new STCRuntimeError("Unexpected null variable in varToExpr");
}
}
Value val = new Value(TclNamer.prefixVar(v.name()));
// Mark values whose runtime representation is a Tcl list so they are
// expanded/escaped correctly downstream.
if (representationIsTclList(v.type())) {
val.setTreatAsList(true);
}
val.setSupportsStringList(supportsStringList(v.type()));
return val;
}
/**
* Whether we can include value of this time in string list, e.g.
* "${x} ${y}"
* @param type
* @return
*/
public static boolean supportsStringList(Type type) {
// Can't escape these types correctly
List<Type> badTypes = Arrays.asList(Types.V_STRING, Types.V_BLOB);
for (Type t: badTypes) {
if (type.assignableTo(t)) {
return false;
}
}
return true;
}
/**
 * Whether the backend representation of this type is a Tcl list
 * (files and local structs/containers).
 */
public static boolean representationIsTclList(Type type) {
// TODO: treat handles as lists?
if (Types.isFile(type) || Types.isStructLocal(type) ||
Types.isFileVal(type) || Types.isContainerLocal(type)) {
return true;
}
return false;
}
/**
 * Converts each variable in order; see {@link #varToExpr(Var)}.
 */
public static List<Expression> varsToExpr(List<Var> inputs) {
List<Expression> res = new ArrayList<Expression>(inputs.size());
for (Var in: inputs) {
res.add(varToExpr(in));
}
return res;
}
/**
 * Builds a TclList expression of variable references.
 */
public static TclList tclListOfVariables(List<Var> inputs) {
TclList result = new TclList();
for (Var v: inputs)
result.add(varToExpr(v));
return result;
}
/**
 * Builds a TclList expression of argument expressions.
 */
public static TclList tclListOfArgs(List<Arg> inputs) {
TclList result = new TclList();
for (Arg a: inputs)
result.add(argToExpr(a));
return result;
}
/**
* Try to pack list of expressions into a string that is a valid
* tcl list
* Fallback to list if we don't know how to do escaping correctly
* @param ruleTokens
* @return
*/
public static Expression tclStringAsList(List<Expression> ruleTokens) {
boolean canUseString = true;
for (Expression tok: ruleTokens) {
if (!tok.supportsStringList()) {
canUseString = false;
break;
}
}
if (canUseString) {
return new TclString(ruleTokens, ExprContext.LIST_STRING);
} else {
return new TclList(ruleTokens);
}
}
}
|
Pass ADLB handles correctly.
git-svn-id: 0c5512015aa96f7d3f5c3ad598bd98edc52008b1@11088 dc4e9af1-7f46-4ead-bba6-71afc04862de
|
code/src/exm/stc/tclbackend/TclUtil.java
|
Pass ADLB handles correctly.
|
|
Java
|
bsd-2-clause
|
668f5b2e2867939d15e157d515d31a49bfaec522
| 0
|
iron-io/iron_mq_java,iron-io/iron_mq_java
|
package io.iron.ironmq;
import java.io.IOException;
import java.io.Reader;
import com.google.gson.Gson;
/**
* The Queue class represents a specific IronMQ queue bound to a client.
*/
/**
 * The Queue class represents a specific IronMQ queue bound to a client.
 */
public class Queue {
    private final Client client;
    private final String name;

    /**
     * Binds a queue name to an IronMQ client.
     *
     * @param client The client used for all HTTP calls.
     * @param name The queue's name.
     */
    public Queue(Client client, String name) {
        this.client = client;
        this.name = name;
    }

    /**
     * Retrieves a Message from the queue. If there are no items on the queue, an
     * EmptyQueueException is thrown.
     *
     * @throws EmptyQueueException If the queue is empty.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Message get() throws IOException {
        Messages msgs = get(1);
        Message msg;
        try {
            msg = msgs.getMessage(0);
        } catch (IndexOutOfBoundsException e) {
            // An empty batch means the queue had nothing to deliver.
            throw new EmptyQueueException();
        }
        return msg;
    }

    /**
     * Retrieves Messages from the queue with the default 120 second timeout.
     *
     * @param numberOfMessages The number of messages to receive. Max. is 100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages) throws IOException {
        return get(numberOfMessages, 120);
    }

    /**
     * Retrieves Messages from the queue.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @param timeout timeout in seconds.
     * @throws IllegalArgumentException If numberOfMessages is outside 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages, int timeout) throws IOException {
        // Bug fix: the lower bound was "< 0", which let 0 through even though the
        // documented (and reported) valid range is 1..100.
        if (numberOfMessages < 1 || numberOfMessages > 100) {
            throw new IllegalArgumentException("numberOfMessages has to be within 1..100");
        }
        Reader reader = client.get("queues/" + name + "/messages?n="+numberOfMessages+"&timeout=" + timeout);
        Gson gson = new Gson();
        return gson.fromJson(reader, Messages.class);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param id The ID of the message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(String id) throws IOException {
        client.delete("queues/" + name + "/messages/" + id);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param msg The message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(Message msg) throws IOException {
        deleteMessage(msg.getId());
    }

    /**
     * Pushes a message onto the queue with no timeout, delay or expiration override.
     *
     * @param msg The body of the message to push.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg) throws IOException {
        return push(msg, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout) throws IOException {
        return push(msg, timeout, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay) throws IOException {
        return push(msg, timeout, delay, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @param expiresIn The message's expiration offset in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay, long expiresIn) throws IOException {
        Message message = new Message();
        message.setBody(msg);
        message.setTimeout(timeout);
        message.setDelay(delay);
        message.setExpiresIn(expiresIn);
        // The API expects a JSON envelope holding a list of messages.
        Messages msgs = new Messages(message);
        Gson gson = new Gson();
        String body = gson.toJson(msgs);
        Reader reader = client.post("queues/" + name + "/messages", body);
        Ids ids = gson.fromJson(reader, Ids.class);
        return ids.getId(0);
    }

    /**
     * Clears the queue of all messages.
     *
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void clear() throws IOException {
        client.post("queues/"+name+"/clear", "");
    }
}
|
src/main/java/io/iron/ironmq/Queue.java
|
package io.iron.ironmq;
import java.io.IOException;
import java.io.Reader;
import com.google.gson.Gson;
/**
* The Queue class represents a specific IronMQ queue bound to a client.
*/
/**
 * The Queue class represents a specific IronMQ queue bound to a client.
 */
public class Queue {
    private final Client client;
    private final String name;

    /**
     * Binds a queue name to an IronMQ client.
     *
     * @param client The client used for all HTTP calls.
     * @param name The queue's name.
     */
    public Queue(Client client, String name) {
        this.client = client;
        this.name = name;
    }

    /**
     * Retrieves a Message from the queue. If there are no items on the queue, an
     * EmptyQueueException is thrown.
     *
     * @throws EmptyQueueException If the queue is empty.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Message get() throws IOException {
        Messages msgs = get(1);
        Message msg;
        try {
            msg = msgs.getMessage(0);
        } catch (IndexOutOfBoundsException e) {
            // An empty batch means the queue had nothing to deliver.
            throw new EmptyQueueException();
        }
        return msg;
    }

    /**
     * Retrieves Messages from the queue with the default 120 second timeout.
     *
     * @param numberOfMessages The number of messages to receive. Max. is 100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages) throws IOException {
        return get(numberOfMessages, 120);
    }

    /**
     * Retrieves Messages from the queue.
     *
     * @param numberOfMessages The number of messages to receive. Must be within 1..100.
     * @param timeout timeout in seconds.
     * @throws IllegalArgumentException If numberOfMessages is outside 1..100.
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public Messages get(int numberOfMessages, int timeout) throws IOException {
        // Bug fix: the lower bound was "< 0", which let 0 through even though the
        // documented (and reported) valid range is 1..100.
        if (numberOfMessages < 1 || numberOfMessages > 100) {
            throw new IllegalArgumentException("numberOfMessages has to be within 1..100");
        }
        Reader reader = client.get("queues/" + name + "/messages?n="+numberOfMessages+"&timeout=" + timeout);
        Gson gson = new Gson();
        return gson.fromJson(reader, Messages.class);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param id The ID of the message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(String id) throws IOException {
        client.delete("queues/" + name + "/messages/" + id);
    }

    /**
     * Deletes a Message from the queue.
     *
     * @param msg The message to delete.
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public void deleteMessage(Message msg) throws IOException {
        deleteMessage(msg.getId());
    }

    /**
     * Pushes a message onto the queue with no timeout, delay or expiration override.
     *
     * @param msg The body of the message to push.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg) throws IOException {
        return push(msg, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout) throws IOException {
        return push(msg, timeout, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay) throws IOException {
        return push(msg, timeout, delay, 0);
    }

    /**
     * Pushes a message onto the queue.
     *
     * @param msg The body of the message to push.
     * @param timeout The message's timeout in seconds.
     * @param delay The message's delay in seconds.
     * @param expiresIn The message's expiration offset in seconds.
     * @return The new message's ID
     *
     * @throws HTTPException If the IronMQ service returns a status other than 200 OK.
     * @throws IOException If there is an error accessing the IronMQ server.
     */
    public String push(String msg, long timeout, long delay, long expiresIn) throws IOException {
        Message message = new Message();
        message.setBody(msg);
        message.setTimeout(timeout);
        message.setDelay(delay);
        message.setExpiresIn(expiresIn);
        // The API expects a JSON envelope holding a list of messages.
        Messages msgs = new Messages(message);
        Gson gson = new Gson();
        String body = gson.toJson(msgs);
        Reader reader = client.post("queues/" + name + "/messages", body);
        Ids ids = gson.fromJson(reader, Ids.class);
        return ids.getId(0);
    }
}
|
added clear operation
|
src/main/java/io/iron/ironmq/Queue.java
|
added clear operation
|
|
Java
|
bsd-3-clause
|
c7a070618457a69599798621ce74348adb1177c4
| 0
|
tomka/imglib
|
/**
* Copyright (c) 2009--2010, Stephan Preibisch & Johannes Schindelin
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. Redistributions in binary
* form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials
* provided with the distribution. Neither the name of the Fiji project nor
* the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @author Johannes Schindelin & Stephan Preibisch
*/
package mpicbg.imglib.container.imageplus;
import java.util.ArrayList;
import ij.ImagePlus;
import mpicbg.imglib.container.Container3D;
import mpicbg.imglib.container.PixelGridContainerImpl;
import mpicbg.imglib.container.basictypecontainer.array.ArrayDataAccess;
import mpicbg.imglib.cursor.Cursor;
import mpicbg.imglib.cursor.LocalizableByDimCursor;
import mpicbg.imglib.cursor.LocalizableCursor;
import mpicbg.imglib.cursor.LocalizablePlaneCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableByDimCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableByDimOutsideCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizablePlaneCursor;
import mpicbg.imglib.exception.ImgLibException;
import mpicbg.imglib.image.Image;
import mpicbg.imglib.outside.OutsideStrategyFactory;
import mpicbg.imglib.type.Type;
/**
 * Pixel container backed by per-slice arrays mirroring an ImageJ ImagePlus stack
 * (one array entry per z-slice). Supports up to three dimensions (width, height, depth).
 */
public class ImagePlusContainer<T extends Type<T>, A extends ArrayDataAccess<A>> extends PixelGridContainerImpl<T,A> implements Container3D<T,A>
{
final ImagePlusContainerFactory factory;
final int width, height, depth;
// One data array per z-slice; indexed by the cursor's storage index.
final ArrayList<A> mirror;
/**
 * Creates the container shell without allocating slice storage.
 * Missing dimensions default to 1 (so 1D/2D inputs are accepted).
 */
ImagePlusContainer( final ImagePlusContainerFactory factory, final int[] dim, final int entitiesPerPixel )
{
super( factory, dim, entitiesPerPixel );
this.factory = factory;
this.width = dim[ 0 ];
if( dim.length < 2 )
this.height = 1;
else
this.height = dim[ 1 ];
if ( dim.length < 3 )
this.depth = 1;
else
this.depth = dim[ 2 ];
mirror = new ArrayList<A>( depth );
}
/**
 * Creates the container and allocates one array per slice via the creator.
 */
ImagePlusContainer( final ImagePlusContainerFactory factory, final A creator, final int[] dim, final int entitiesPerPixel )
{
this( factory, dim, entitiesPerPixel );
for ( int i = 0; i < depth; ++i )
mirror.add( creator.createArray( width * height * entitiesPerPixel ));
}
/**
 * Base class has no concrete ImagePlus; typed subclasses are expected to override.
 */
public ImagePlus getImagePlus() throws ImgLibException
{
throw new ImgLibException( this, "has no ImagePlus instance, it is not a standard type of ImagePlus" );
}
// Returns the slice array for the cursor's current z position.
@Override
public A update( final Cursor<?> c ) { return mirror.get( c.getStorageIndex() ); }
/**
 * Derives a 1D/2D/3D dimension array from an ImagePlus, dropping singleton trailing dims.
 * NOTE(review): if height == 1 but the stack has several slices, the stack size is
 * written to neither dim slot — confirm this combination cannot occur for valid inputs.
 */
protected static int[] getCorrectDimensionality( final ImagePlus imp )
{
int numDimensions = 3;
if ( imp.getStackSize() == 1 )
--numDimensions;
if ( imp.getHeight() == 1 )
--numDimensions;
final int[] dim = new int[ numDimensions ];
dim[ 0 ] = imp.getWidth();
if ( numDimensions >= 2 )
dim[ 1 ] = imp.getHeight();
if ( numDimensions == 3 )
dim[ 2 ] = imp.getStackSize();
return dim;
}
@Override
public int getWidth() { return width; }
@Override
public int getHeight() { return height; }
@Override
public int getDepth() { return depth; }
// Row-major offset of a (x[, y]) position inside one slice.
public final int getPos( final int[] l )
{
if ( numDimensions > 1 )
return l[ 1 ] * width + l[ 0 ];
else
return l[ 0 ];
}
// Cursor factory methods: each returns the ImagePlus-specific cursor implementation.
public Cursor<T> createCursor( T type, Image<T> image )
{
return new ImagePlusCursor<T>( this, image, type );
}
public LocalizableCursor<T> createLocalizableCursor( T type, Image<T> image )
{
return new ImagePlusLocalizableCursor<T>( this, image, type );
}
;
public LocalizablePlaneCursor<T> createLocalizablePlaneCursor( T type, Image<T> image )
{
return new ImagePlusLocalizablePlaneCursor<T>( this, image, type );
}
;
public LocalizableByDimCursor<T> createLocalizableByDimCursor( T type, Image<T> image )
{
return new ImagePlusLocalizableByDimCursor<T>( this, image, type );
}
;
// Variant that supports positions outside the image via the given strategy.
public LocalizableByDimCursor<T> createLocalizableByDimCursor( T type, Image<T> image, OutsideStrategyFactory<T> outsideFactory )
{
return new ImagePlusLocalizableByDimOutsideCursor<T>( this, image, type, outsideFactory );
}
public ImagePlusContainerFactory getFactory() { return factory; }
// Releases every slice array.
@Override
public void close()
{
for ( final A array : mirror )
array.close();
}
}
|
mpicbg/imglib/container/imageplus/ImagePlusContainer.java
|
/**
* Copyright (c) 2009--2010, Stephan Preibisch & Johannes Schindelin
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer. Redistributions in binary
* form must reproduce the above copyright notice, this list of conditions and
* the following disclaimer in the documentation and/or other materials
* provided with the distribution. Neither the name of the Fiji project nor
* the names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @author Johannes Schindelin & Stephan Preibisch
*/
package mpicbg.imglib.container.imageplus;
import java.util.ArrayList;
import ij.ImagePlus;
import mpicbg.imglib.container.Container3D;
import mpicbg.imglib.container.PixelGridContainerImpl;
import mpicbg.imglib.container.basictypecontainer.array.ArrayDataAccess;
import mpicbg.imglib.cursor.Cursor;
import mpicbg.imglib.cursor.LocalizableByDimCursor;
import mpicbg.imglib.cursor.LocalizableCursor;
import mpicbg.imglib.cursor.LocalizablePlaneCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableByDimCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableByDimOutsideCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizableCursor;
import mpicbg.imglib.cursor.imageplus.ImagePlusLocalizablePlaneCursor;
import mpicbg.imglib.exception.ImgLibException;
import mpicbg.imglib.image.Image;
import mpicbg.imglib.outside.OutsideStrategyFactory;
import mpicbg.imglib.type.Type;
/**
 * A {@link Container3D} that mirrors the per-slice storage layout of an ImageJ
 * {@link ImagePlus}: pixel data is held as one {@link ArrayDataAccess} array per
 * z-slice rather than as a single flat array.
 *
 * @param <T> the pixel type stored in this container
 * @param <A> the primitive-array access type backing each slice
 */
public class ImagePlusContainer<T extends Type<T>, A extends ArrayDataAccess<A>> extends PixelGridContainerImpl<T,A> implements Container3D<T,A>
{
	// Factory that created this container; exposed via getFactory().
	final ImagePlusContainerFactory factory;

	// Cached dimensions; any dimension missing from dim[] defaults to 1.
	final int width, height, depth;

	// One backing array per z-slice, indexed by the cursor's storage index.
	final ArrayList<A> mirror;

	/**
	 * Creates the container shell without allocating slice data.
	 *
	 * @param factory          the factory creating this container
	 * @param dim              image dimensions; length 1-3, trailing dims default to 1
	 * @param entitiesPerPixel number of array entities occupied by one pixel
	 */
	ImagePlusContainer( final ImagePlusContainerFactory factory, final int[] dim, final int entitiesPerPixel )
	{
		super( factory, dim, entitiesPerPixel );
		this.factory = factory;

		this.width = dim[ 0 ];

		if ( dim.length < 2 )
			this.height = 1;
		else
			this.height = dim[ 1 ];

		if ( dim.length < 3 )
			this.depth = 1;
		else
			this.depth = dim[ 2 ];

		mirror = new ArrayList<A>( depth );
	}

	/**
	 * Creates the container and allocates one data array per slice.
	 *
	 * BUGFIX: each slice holds width * height pixels, and each pixel spans
	 * {@code entitiesPerPixel} array entities — the previous allocation of
	 * {@code width * height} under-sized the slice arrays for any type with
	 * more than one entity per pixel.
	 */
	ImagePlusContainer( final ImagePlusContainerFactory factory, final A creator, final int[] dim, final int entitiesPerPixel )
	{
		this( factory, dim, entitiesPerPixel );

		for ( int i = 0; i < depth; ++i )
			mirror.add( creator.createArray( width * height * entitiesPerPixel ) );
	}

	/**
	 * Returns the wrapped {@link ImagePlus}. This base implementation always
	 * throws; subclasses for standard ImageJ types override it.
	 *
	 * @throws ImgLibException always, for non-standard pixel types
	 */
	public ImagePlus getImagePlus() throws ImgLibException
	{
		throw new ImgLibException( this, "has no ImagePlus instance, it is not a standard type of ImagePlus" );
	}

	/** Returns the backing array of the slice the cursor currently sits in. */
	@Override
	public A update( final Cursor<?> c ) { return mirror.get( c.getStorageIndex() ); }

	/**
	 * Derives the effective dimensionality of an {@link ImagePlus}, dropping a
	 * size-1 stack and/or a size-1 height from the nominal 3 dimensions.
	 */
	protected static int[] getCorrectDimensionality( final ImagePlus imp )
	{
		int numDimensions = 3;

		if ( imp.getStackSize() == 1 )
			--numDimensions;

		if ( imp.getHeight() == 1 )
			--numDimensions;

		final int[] dim = new int[ numDimensions ];
		dim[ 0 ] = imp.getWidth();

		if ( numDimensions >= 2 )
			dim[ 1 ] = imp.getHeight();

		if ( numDimensions == 3 )
			dim[ 2 ] = imp.getStackSize();

		return dim;
	}

	@Override
	public int getWidth() { return width; }

	@Override
	public int getHeight() { return height; }

	@Override
	public int getDepth() { return depth; }

	/** Converts an in-slice position (x[, y]) to a linear index within one slice array. */
	public final int getPos( final int[] l )
	{
		if ( numDimensions > 1 )
			return l[ 1 ] * width + l[ 0 ];
		else
			return l[ 0 ];
	}

	public Cursor<T> createCursor( T type, Image<T> image )
	{
		return new ImagePlusCursor<T>( this, image, type );
	}

	public LocalizableCursor<T> createLocalizableCursor( T type, Image<T> image )
	{
		return new ImagePlusLocalizableCursor<T>( this, image, type );
	}

	public LocalizablePlaneCursor<T> createLocalizablePlaneCursor( T type, Image<T> image )
	{
		return new ImagePlusLocalizablePlaneCursor<T>( this, image, type );
	}

	public LocalizableByDimCursor<T> createLocalizableByDimCursor( T type, Image<T> image )
	{
		return new ImagePlusLocalizableByDimCursor<T>( this, image, type );
	}

	public LocalizableByDimCursor<T> createLocalizableByDimCursor( T type, Image<T> image, OutsideStrategyFactory<T> outsideFactory )
	{
		return new ImagePlusLocalizableByDimOutsideCursor<T>( this, image, type, outsideFactory );
	}

	public ImagePlusContainerFactory getFactory() { return factory; }

	/** Releases every slice array. */
	@Override
	public void close()
	{
		for ( final A array : mirror )
			array.close();
	}
}
|
Fixed another entitiesPerPixel bug in ImagePlusContainer
(cherry picked from commit 365151869ee20cac8cf14d3329c622d43f26d540)
|
mpicbg/imglib/container/imageplus/ImagePlusContainer.java
|
Fixed another entitiesPerPixel bug in ImagePlusContainer (cherry picked from commit 365151869ee20cac8cf14d3329c622d43f26d540)
|
|
Java
|
mit
|
65c08c6233d48a86f54c129354791b9b34385b3b
| 0
|
takumaniihara/kikisen_app
|
package jp.gr.java_conf.nippy.kikisen;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Main screen: forwards typed text and preset phrases to a Bouyomi-chan
 * text-to-speech server over the network.
 *
 * Fixes in this revision:
 * - onResume() read the voice-type preference with a stray colon in the key
 *   ("list_preference_type:"), so the status display always showed the
 *   default "0" instead of the saved value.
 * - The three identical placeholder dialogs are extracted into
 *   showComingSoonDialog(); the "comming" typo in its message is corrected.
 */
public class MainActivity extends Activity {
    private static final String TAG = MainActivity.class.getName();

    TextView tvIP;
    EditText etSendString;
    Button btSend;
    Button btNo;
    Button btYes;
    Button btEnemy;
    Button btDirection;
    Button btDistance;
    Button btNumber;
    // Client for the Bouyomi-chan server; (re)created in onResume(), dropped in onStop().
    BouyomiChan4J bouyomi;
    SharedPreferences pref;

    /**
     * Called when the activity is first created. Wires up every button;
     * direction/distance/number inputs are placeholders for now.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        setContentView(R.layout.activity_main);
        tvIP = (TextView) findViewById(R.id.tvIP);
        etSendString = (EditText) findViewById(R.id.etSendString);
        btSend = (Button) findViewById(R.id.btSend);
        etSendString.setEnabled(true);
        btNo = (Button) findViewById(R.id.btNo);
        btYes = (Button) findViewById(R.id.btYes);
        btEnemy = (Button) findViewById(R.id.btEnemy);
        btDirection = (Button) findViewById(R.id.btDirection);
        btDistance = (Button) findViewById(R.id.btDistance);
        btNumber = (Button) findViewById(R.id.btNumber);
        pref = PreferenceManager.getDefaultSharedPreferences(this);

        //SEND button: speak the typed text, then clear the field
        btSend.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk(etSendString.getText().toString());
                etSendString.getEditableText().clear();
            }
        });

        //enter pressed: same as SEND, only for non-empty input
        etSendString.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER) && !(etSendString.getText().toString().equals(""))) {
                    talk(etSendString.getText().toString());
                    etSendString.getEditableText().clear();
                    return true;
                }
                return false;
            }
        });

        //No button
        btNo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("いいえ");
            }
        });

        //Yes button
        btYes.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("はい");
            }
        });
        btYes.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                talk("ろんぐくりっく");
                return true;
            }
        });

        //Enemy button
        btEnemy.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("てき");
            }
        });

        //Direction button
        btDirection.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //TODO enter direction
                showComingSoonDialog();
            }
        });

        //Distance button
        btDistance.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //TODO enter distance
                showComingSoonDialog();
            }
        });

        //Number button
        btNumber.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //TODO enter number
                showComingSoonDialog();
            }
        });

        // SKIP button: clear the queue and skip the current utterance
        Button btSkip = (Button) findViewById(R.id.btSkip);
        btSkip.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                new Thread(new Runnable() {
                    public void run() {
                        bouyomi.clear();
                        bouyomi.skip();
                    }
                }).start();
            }
        });
    }

    /**
     * Shows the placeholder dialog for the not-yet-implemented
     * direction/distance/number inputs.
     */
    private void showComingSoonDialog() {
        AlertDialog alertDialog = new AlertDialog.Builder(MainActivity.this).create();
        alertDialog.setTitle("hi");
        alertDialog.setMessage("coming soon (tm)");
        // NOTE(review): setButton(String, listener) is deprecated; switch to
        // setButton(DialogInterface.BUTTON_POSITIVE, ...) when convenient.
        alertDialog.setButton("Continue..", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                // no-op: the dialog simply dismisses
            }
        });
        alertDialog.show();
    }

    /**
     * Sends a non-empty string to Bouyomi-chan on a background thread,
     * using the saved volume/speed/interval/voice-type preferences.
     */
    private void talk(final String str) {
        if (!(str.equals(""))) {
            new Thread(new Runnable() {
                public void run() {
                    bouyomi.talk(Integer.parseInt(pref.getString("list_preference_volume", "50")),
                            Integer.parseInt(pref.getString("list_preference_speed", "100")),
                            Integer.parseInt(pref.getString("list_preference_interval", "100")),
                            Integer.parseInt(pref.getString("list_preference_type", "0")),
                            str);
                }
            }).start();
        }
    }

    //menu
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.optionsMenu_01:
                Intent intent1 = new android.content.Intent(this, MainPreferenceActivity.class);
                startActivity(intent1);
                return true;
            case R.id.optionsMenu_02:
                Intent intent2 = new android.content.Intent(this, HowToUseActivity.class);
                startActivity(intent2);
                return true;
            case R.id.optionsMenu_03:
                Intent intent3 = new android.content.Intent(this, AboutThisAppActivity.class);
                startActivity(intent3);
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public void onStart() {
        Log.v(TAG, "onStart");
        super.onStart();
    }

    @Override
    public void onStop() {
        Log.v(TAG, "onStop");
        super.onStop();
        bouyomi = null;
    }

    @Override
    public void onResume() {
        Log.v(TAG, "onResume");
        super.onResume();
        // Reconnect off the UI thread; network I/O is not allowed on it.
        new Thread(new Runnable() {
            public void run() {
                bouyomi = new BouyomiChan4J(pref.getString("edit_text_preference_ip", "127.0.0.1"), Integer.parseInt(pref.getString("edit_text_preference_port", "50001")));
            }
        }).start();
        // BUGFIX: the voice-type key previously had a stray trailing colon
        // ("list_preference_type:"), so the saved value was never displayed.
        tvIP.setText("開始しました \nip:" + pref.getString("edit_text_preference_ip", "127.0.0.1") + "\nport:" + pref.getString("edit_text_preference_port", "50001")
                + "\nvolume:" + pref.getString("list_preference_volume", "50") + "\nspeed:" + pref.getString("list_preference_speed", "100")
                + "\ninterval:" + pref.getString("list_preference_interval", "100") + "\nvoice type:" + pref.getString("list_preference_type", "0"));
    }

    @Override
    public void onPause() {
        Log.v(TAG, "onPause");
        super.onPause();
    }
}
|
app/src/main/java/jp/gr/java_conf/nippy/kikisen/MainActivity.java
|
package jp.gr.java_conf.nippy.kikisen;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
/**
 * Main screen: forwards typed text and preset phrases to a Bouyomi-chan
 * text-to-speech server over the network.
 *
 * Fix in this revision: onResume() read the voice-type preference with a
 * stray colon in the key ("list_preference_type:"), so the status display
 * always showed the default "0" instead of the saved value.
 */
public class MainActivity extends Activity {
    private static final String TAG = MainActivity.class.getName();

    TextView tvIP;
    EditText etSendString;
    Button btSend;
    Button btNo;
    Button btYes;
    Button btEnemy;
    Button btDirection;
    Button btDistance;
    Button btNumber;
    // Client for the Bouyomi-chan server; (re)created in onResume(), dropped in onStop().
    BouyomiChan4J bouyomi;
    SharedPreferences pref;

    /**
     * Called when the activity is first created. Wires up every button;
     * direction/distance/number currently just speak placeholder phrases.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        setContentView(R.layout.activity_main);
        tvIP = (TextView) findViewById(R.id.tvIP);
        etSendString = (EditText) findViewById(R.id.etSendString);
        btSend = (Button) findViewById(R.id.btSend);
        etSendString.setEnabled(true);
        btNo = (Button) findViewById(R.id.btNo);
        btYes = (Button) findViewById(R.id.btYes);
        btEnemy = (Button) findViewById(R.id.btEnemy);
        btDirection = (Button) findViewById(R.id.btDirection);
        btDistance = (Button) findViewById(R.id.btDistance);
        btNumber = (Button) findViewById(R.id.btNumber);
        pref = PreferenceManager.getDefaultSharedPreferences(this);

        //SEND button: speak the typed text, then clear the field
        btSend.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk(etSendString.getText().toString());
                etSendString.getEditableText().clear();
            }
        });

        //enter pressed: same as SEND, only for non-empty input
        etSendString.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View v, int keyCode, KeyEvent event) {
                if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER) && !(etSendString.getText().toString().equals(""))) {
                    talk(etSendString.getText().toString());
                    etSendString.getEditableText().clear();
                    return true;
                }
                return false;
            }
        });

        //No button
        btNo.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("いいえ");
            }
        });

        //Yes button
        btYes.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("はい");
            }
        });

        //Enemy button
        btEnemy.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("てき");
            }
        });

        //Direction button
        btDirection.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("ほういにゅうりょくがめんよてい");
                //TODO enter direction
            }
        });

        //Distance button
        btDistance.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("きょりにゅうりょくがめんよてい");
                //TODO enter distance
            }
        });

        //Number button
        btNumber.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                talk("にんずうにゅうりょくがめんよてい");
                //TODO enter number
            }
        });

        // SKIP button: clear the queue and skip the current utterance
        Button btSkip = (Button) findViewById(R.id.btSkip);
        btSkip.setOnClickListener(new View.OnClickListener() {
            public void onClick(View v) {
                new Thread(new Runnable() {
                    public void run() {
                        bouyomi.clear();
                        bouyomi.skip();
                    }
                }).start();
            }
        });
    }

    /**
     * Sends a non-empty string to Bouyomi-chan on a background thread,
     * using the saved volume/speed/interval/voice-type preferences.
     */
    private void talk(final String str) {
        if (!(str.equals(""))) {
            new Thread(new Runnable() {
                public void run() {
                    bouyomi.talk(Integer.parseInt(pref.getString("list_preference_volume", "50")),
                            Integer.parseInt(pref.getString("list_preference_speed", "100")),
                            Integer.parseInt(pref.getString("list_preference_interval", "100")),
                            Integer.parseInt(pref.getString("list_preference_type", "0")),
                            str);
                }
            }).start();
        }
    }

    //menu
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.optionsMenu_01:
                Intent intent1 = new android.content.Intent(this, MainPreferenceActivity.class);
                startActivity(intent1);
                return true;
            case R.id.optionsMenu_02:
                Intent intent2 = new android.content.Intent(this, HowToUseActivity.class);
                startActivity(intent2);
                return true;
            case R.id.optionsMenu_03:
                Intent intent3 = new android.content.Intent(this, AboutThisAppActivity.class);
                startActivity(intent3);
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    @Override
    public void onStart() {
        Log.v(TAG, "onStart");
        super.onStart();
    }

    @Override
    public void onStop() {
        Log.v(TAG, "onStop");
        super.onStop();
        bouyomi = null;
    }

    @Override
    public void onResume() {
        Log.v(TAG, "onResume");
        super.onResume();
        // Reconnect off the UI thread; network I/O is not allowed on it.
        new Thread(new Runnable() {
            public void run() {
                bouyomi = new BouyomiChan4J(pref.getString("edit_text_preference_ip", "127.0.0.1"), Integer.parseInt(pref.getString("edit_text_preference_port", "50001")));
            }
        }).start();
        // BUGFIX: the voice-type key previously had a stray trailing colon
        // ("list_preference_type:"), so the saved value was never displayed.
        tvIP.setText("開始しました \nip:" + pref.getString("edit_text_preference_ip", "127.0.0.1") + "\nport:" + pref.getString("edit_text_preference_port", "50001")
                + "\nvolume:" + pref.getString("list_preference_volume", "50") + "\nspeed:" + pref.getString("list_preference_speed", "100")
                + "\ninterval:" + pref.getString("list_preference_interval", "100") + "\nvoice type:" + pref.getString("list_preference_type", "0"));
    }

    @Override
    public void onPause() {
        Log.v(TAG, "onPause");
        super.onPause();
    }
}
|
trying to add popup
|
app/src/main/java/jp/gr/java_conf/nippy/kikisen/MainActivity.java
|
trying to add popup
|
|
Java
|
mit
|
26dc905ff23bd69711bf48a2a46b892db2fb384c
| 0
|
Jimilian/jenkins,hplatou/jenkins,wuwen5/jenkins,tangkun75/jenkins,batmat/jenkins,escoem/jenkins,fbelzunc/jenkins,fbelzunc/jenkins,Vlatombe/jenkins,wuwen5/jenkins,tfennelly/jenkins,jenkinsci/jenkins,tfennelly/jenkins,andresrc/jenkins,amuniz/jenkins,Jimilian/jenkins,kohsuke/hudson,ndeloof/jenkins,Jimilian/jenkins,Vlatombe/jenkins,fbelzunc/jenkins,dariver/jenkins,ErikVerheul/jenkins,godfath3r/jenkins,v1v/jenkins,ndeloof/jenkins,oleg-nenashev/jenkins,DanielWeber/jenkins,recena/jenkins,escoem/jenkins,Jochen-A-Fuerbacher/jenkins,stephenc/jenkins,amuniz/jenkins,tangkun75/jenkins,Jochen-A-Fuerbacher/jenkins,rsandell/jenkins,jenkinsci/jenkins,Ykus/jenkins,fbelzunc/jenkins,pjanouse/jenkins,stephenc/jenkins,tfennelly/jenkins,batmat/jenkins,kohsuke/hudson,tangkun75/jenkins,DanielWeber/jenkins,escoem/jenkins,hplatou/jenkins,godfath3r/jenkins,bkmeneguello/jenkins,oleg-nenashev/jenkins,fbelzunc/jenkins,dariver/jenkins,recena/jenkins,DanielWeber/jenkins,sathiya-mit/jenkins,damianszczepanik/jenkins,v1v/jenkins,viqueen/jenkins,Jimilian/jenkins,jenkinsci/jenkins,escoem/jenkins,jenkinsci/jenkins,v1v/jenkins,ErikVerheul/jenkins,MarkEWaite/jenkins,Ykus/jenkins,hplatou/jenkins,daniel-beck/jenkins,stephenc/jenkins,MarkEWaite/jenkins,wuwen5/jenkins,daniel-beck/jenkins,ErikVerheul/jenkins,ikedam/jenkins,amuniz/jenkins,Jochen-A-Fuerbacher/jenkins,jenkinsci/jenkins,tfennelly/jenkins,ikedam/jenkins,hplatou/jenkins,v1v/jenkins,damianszczepanik/jenkins,Jimilian/jenkins,rsandell/jenkins,Vlatombe/jenkins,andresrc/jenkins,kohsuke/hudson,damianszczepanik/jenkins,ErikVerheul/jenkins,recena/jenkins,aldaris/jenkins,MarkEWaite/jenkins,MarkEWaite/jenkins,amuniz/jenkins,batmat/jenkins,godfath3r/jenkins,hplatou/jenkins,MarkEWaite/jenkins,kohsuke/hudson,damianszczepanik/jenkins,patbos/jenkins,patbos/jenkins,ikedam/jenkins,DanielWeber/jenkins,MarkEWaite/jenkins,jenkinsci/jenkins,azweb76/jenkins,wuwen5/jenkins,escoem/jenkins,MarkEWaite/jenkins,rsandell/jenkins,tangkun75/jenkins,stephenc/jenkins,tfennelly/jenki
ns,rsandell/jenkins,amuniz/jenkins,sathiya-mit/jenkins,amuniz/jenkins,azweb76/jenkins,Ykus/jenkins,recena/jenkins,recena/jenkins,viqueen/jenkins,recena/jenkins,aldaris/jenkins,Jochen-A-Fuerbacher/jenkins,ikedam/jenkins,bkmeneguello/jenkins,kohsuke/hudson,Jimilian/jenkins,Ykus/jenkins,ikedam/jenkins,kohsuke/hudson,tfennelly/jenkins,bkmeneguello/jenkins,viqueen/jenkins,Ykus/jenkins,Vlatombe/jenkins,ndeloof/jenkins,godfath3r/jenkins,jenkinsci/jenkins,tangkun75/jenkins,pjanouse/jenkins,aldaris/jenkins,stephenc/jenkins,amuniz/jenkins,sathiya-mit/jenkins,escoem/jenkins,daniel-beck/jenkins,ErikVerheul/jenkins,pjanouse/jenkins,dariver/jenkins,DanielWeber/jenkins,damianszczepanik/jenkins,dariver/jenkins,andresrc/jenkins,viqueen/jenkins,Vlatombe/jenkins,damianszczepanik/jenkins,stephenc/jenkins,batmat/jenkins,ndeloof/jenkins,wuwen5/jenkins,viqueen/jenkins,batmat/jenkins,bkmeneguello/jenkins,dariver/jenkins,oleg-nenashev/jenkins,MarkEWaite/jenkins,ErikVerheul/jenkins,v1v/jenkins,wuwen5/jenkins,hplatou/jenkins,rsandell/jenkins,rsandell/jenkins,patbos/jenkins,tangkun75/jenkins,rsandell/jenkins,v1v/jenkins,godfath3r/jenkins,aldaris/jenkins,pjanouse/jenkins,recena/jenkins,escoem/jenkins,ikedam/jenkins,ErikVerheul/jenkins,hplatou/jenkins,ndeloof/jenkins,daniel-beck/jenkins,ikedam/jenkins,sathiya-mit/jenkins,daniel-beck/jenkins,viqueen/jenkins,oleg-nenashev/jenkins,daniel-beck/jenkins,Jimilian/jenkins,viqueen/jenkins,azweb76/jenkins,bkmeneguello/jenkins,kohsuke/hudson,daniel-beck/jenkins,Jochen-A-Fuerbacher/jenkins,andresrc/jenkins,damianszczepanik/jenkins,batmat/jenkins,dariver/jenkins,azweb76/jenkins,DanielWeber/jenkins,patbos/jenkins,oleg-nenashev/jenkins,sathiya-mit/jenkins,bkmeneguello/jenkins,azweb76/jenkins,Ykus/jenkins,kohsuke/hudson,godfath3r/jenkins,bkmeneguello/jenkins,patbos/jenkins,batmat/jenkins,rsandell/jenkins,godfath3r/jenkins,sathiya-mit/jenkins,azweb76/jenkins,fbelzunc/jenkins,ikedam/jenkins,aldaris/jenkins,Vlatombe/jenkins,jenkinsci/jenkins,wuwen5/jenkins,Ykus/je
nkins,dariver/jenkins,Vlatombe/jenkins,Jochen-A-Fuerbacher/jenkins,sathiya-mit/jenkins,andresrc/jenkins,patbos/jenkins,ndeloof/jenkins,tfennelly/jenkins,azweb76/jenkins,pjanouse/jenkins,andresrc/jenkins,damianszczepanik/jenkins,aldaris/jenkins,ndeloof/jenkins,pjanouse/jenkins,patbos/jenkins,stephenc/jenkins,tangkun75/jenkins,oleg-nenashev/jenkins,DanielWeber/jenkins,oleg-nenashev/jenkins,v1v/jenkins,andresrc/jenkins,aldaris/jenkins,daniel-beck/jenkins,pjanouse/jenkins,fbelzunc/jenkins,Jochen-A-Fuerbacher/jenkins
|
/*
* The MIT License
*
* Copyright (c) 2004-2012, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Daniel Dyer, Red Hat, Inc., Tom Huybrechts, Romain Seguy, Yahoo! Inc.,
* Darek Ostolski, CloudBees, Inc.
*
* Copyright (c) 2012, Martin Schroeder, Intel Mobile Communications GmbH
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import com.jcraft.jzlib.GZIPInputStream;
import com.thoughtworks.xstream.XStream;
import hudson.AbortException;
import hudson.BulkChange;
import hudson.EnvVars;
import hudson.ExtensionList;
import hudson.ExtensionPoint;
import hudson.FeedAdapter;
import hudson.Functions;
import hudson.console.AnnotatedLargeText;
import hudson.console.ConsoleLogFilter;
import hudson.console.ConsoleNote;
import hudson.console.ModelHyperlinkNote;
import hudson.console.PlainTextConsoleOutputStream;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.StandardOpenOption;
import jenkins.util.SystemProperties;
import hudson.Util;
import hudson.XmlFile;
import hudson.cli.declarative.CLIMethod;
import hudson.model.Descriptor.FormException;
import hudson.model.listeners.RunListener;
import hudson.model.listeners.SaveableListener;
import hudson.model.queue.Executables;
import hudson.model.queue.SubTask;
import hudson.search.SearchIndexBuilder;
import hudson.security.ACL;
import hudson.security.AccessControlled;
import hudson.security.Permission;
import hudson.security.PermissionGroup;
import hudson.security.PermissionScope;
import hudson.tasks.BuildWrapper;
import hudson.util.FormApply;
import hudson.util.LogTaskListener;
import hudson.util.ProcessTree;
import hudson.util.XStream2;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.io.Reader;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import static java.util.logging.Level.*;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import jenkins.model.ArtifactManager;
import jenkins.model.ArtifactManagerConfiguration;
import jenkins.model.ArtifactManagerFactory;
import jenkins.model.BuildDiscarder;
import jenkins.model.Jenkins;
import jenkins.model.JenkinsLocationConfiguration;
import jenkins.model.PeepholePermalink;
import jenkins.model.RunAction2;
import jenkins.model.StandardArtifactManager;
import jenkins.model.lazy.BuildReference;
import jenkins.model.lazy.LazyBuildMixIn;
import jenkins.util.VirtualFile;
import jenkins.util.io.OnMaster;
import net.sf.json.JSONObject;
import org.acegisecurity.AccessDeniedException;
import org.acegisecurity.Authentication;
import org.apache.commons.io.IOUtils;
import org.apache.commons.jelly.XMLOutput;
import org.apache.commons.lang.ArrayUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.interceptor.RequirePOST;
/**
* A particular execution of {@link Job}.
*
* <p>
* Custom {@link Run} type is always used in conjunction with
* a custom {@link Job} type, so there's no separate registration
* mechanism for custom {@link Run} types.
*
* @author Kohsuke Kawaguchi
* @see RunListener
*/
@ExportedBean
public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,RunT>>
extends Actionable implements ExtensionPoint, Comparable<RunT>, AccessControlled, PersistenceRoot, DescriptorByNameOwner, OnMaster {
/**
* The original {@link Queue.Item#getId()} has not yet been mapped onto the {@link Run} instance.
* @since 1.601
*/
public static final long QUEUE_ID_UNKNOWN = -1;
protected transient final @Nonnull JobT project;
/**
* Build number.
*
* <p>
* In earlier versions < 1.24, this number is not unique nor continuous,
* but going forward, it will, and this really replaces the build id.
*/
public transient /*final*/ int number;
/**
* The original Queue task ID from where this Run instance originated.
*/
private long queueId = Run.QUEUE_ID_UNKNOWN;
/**
* Previous build. Can be null.
* TODO JENKINS-22052 this is not actually implemented any more
*
* External code should use {@link #getPreviousBuild()}
*/
@Restricted(NoExternalUse.class)
protected volatile transient RunT previousBuild;
/**
* Next build. Can be null.
*
* External code should use {@link #getNextBuild()}
*/
@Restricted(NoExternalUse.class)
protected volatile transient RunT nextBuild;
/**
* Pointer to the next younger build in progress. This data structure is lazily updated,
* so it may point to the build that's already completed. This pointer is set to 'this'
* if the computation determines that everything earlier than this build is already completed.
*/
/* does not compile on JDK 7: private*/ volatile transient RunT previousBuildInProgress;
/** ID as used for historical build records; otherwise null. */
private @CheckForNull String id;
/**
 * When the build is scheduled.
 */
protected /*final*/ long timestamp;
/**
 * When the build has started running.
 *
 * For historical reasons, 0 means no value is recorded.
 *
 * @see #getStartTimeInMillis()
 */
private long startTime;
/**
 * The build result.
 * This value may change while the state is in {@link Run.State#BUILDING}.
 */
protected volatile Result result;
/**
 * Human-readable description. Can be null.
 */
protected volatile String description;
/**
 * Human-readable name of this build. Can be null.
 * If non-null, this text is displayed instead of "#NNN", which is the default.
 * @since 1.390
 */
private volatile String displayName;
/**
 * The current build state.
 * Transient: reconstructed on load ({@link #reload} forces {@link State#COMPLETED}).
 */
private volatile transient State state;
/**
 * Lifecycle states of a build, in chronological order.
 * The ordinal ordering is significant: {@link #isBuilding}/{@link #isLogUpdated}
 * rely on {@code compareTo} against these constants.
 */
private static enum State {
    /**
     * Build is created/queued but we haven't started building it.
     */
    NOT_STARTED,
    /**
     * Build is in progress.
     */
    BUILDING,
    /**
     * Build is completed now, and the status is determined,
     * but log files are still being updated.
     *
     * The significance of this state is that Jenkins
     * will now see this build as completed. Things like
     * "triggering other builds" requires this as pre-condition.
     * See JENKINS-980.
     */
    POST_PRODUCTION,
    /**
     * Build is completed now, and log file is closed.
     */
    COMPLETED
}
/**
 * Number of milli-seconds it took to run this build.
 */
protected long duration;
/**
 * Charset in which the log file is written.
 * For compatibility reason, this field may be null.
 * For persistence, this field is string and not {@link Charset}.
 *
 * @see #getCharset()
 * @since 1.257
 */
protected String charset;
/**
 * When true, this build is explicitly marked to be kept (exempt from discarding);
 * see {@link #getWhyKeepLog()} and {@link #isKeepLog()}.
 */
private boolean keepLog;
/**
 * If the build is in progress, remember {@link RunExecution} that's running it.
 * This field is not persisted.
 */
private volatile transient RunExecution runner;
/**
 * Artifact manager associated with this build, if any.
 * Null until {@link #pickArtifactManager} selects a non-default manager.
 * @since 1.532
 */
private @CheckForNull ArtifactManager artifactManager;
/**
 * Creates a new {@link Run}, assigning the next build number from the owning job.
 * @param job Owner job
 * @throws IOException if assigning the build number fails
 */
protected Run(@Nonnull JobT job) throws IOException {
    this(job, System.currentTimeMillis());
    // number must be assigned after the delegating constructor has set 'project'
    this.number = project.assignBuildNumber();
    LOGGER.log(FINER, "new {0} @{1}", new Object[] {this, hashCode()});
}
/**
 * Constructor for creating a {@link Run} object in
 * an arbitrary state.
 * {@link #number} must be set manually.
 * <p>May be used in a {@link SubTask#createExecutable} (instead of calling {@link LazyBuildMixIn#newBuild}).
 * For example, {@code MatrixConfiguration.newBuild} does this
 * so that the {@link #timestamp} as well as {@link #number} are shared with the parent build.
 */
protected Run(@Nonnull JobT job, @Nonnull Calendar timestamp) {
    this(job,timestamp.getTimeInMillis());
}
/**
 * @see #Run(Job, Calendar)
 * @param timestamp scheduling time in milliseconds since the epoch
 */
protected Run(@Nonnull JobT job, long timestamp) {
    this.project = job;
    this.timestamp = timestamp;
    this.state = State.NOT_STARTED;
}
/**
 * Loads a run from a log file.
 * The build number is derived from the build directory name, which is
 * expected to be the decimal build number.
 *
 * @throws IOException if the persisted record cannot be read
 * @throws NumberFormatException if the directory name is not a number
 */
protected Run(@Nonnull JobT project, @Nonnull File buildDir) throws IOException {
    this.project = project;
    this.previousBuildInProgress = _this(); // loaded builds are always completed
    number = Integer.parseInt(buildDir.getName());
    reload();
}
/**
 * Reloads the build record from disk.
 * Forces {@link State#COMPLETED} and a defensive {@link Result#FAILURE}
 * before unmarshalling, so inconsistent saved data still yields usable state.
 *
 * @since 1.410
 */
public void reload() throws IOException {
    this.state = State.COMPLETED;
    // TODO ABORTED would perhaps make more sense than FAILURE:
    this.result = Result.FAILURE; // defensive measure. value should be overwritten by unmarshal, but just in case the saved data is inconsistent
    getDataFile().unmarshal(this); // load the rest of the data
    if (state == State.COMPLETED) {
        LOGGER.log(FINER, "reload {0} @{1}", new Object[] {this, hashCode()});
    } else {
        // unmarshal should never leave a non-COMPLETED state; log loudly if it does
        LOGGER.log(WARNING, "reload {0} @{1} with anomalous state {2}", new Object[] {this, hashCode(), state});
    }
    // not calling onLoad upon reload. partly because we don't want to call that from Run constructor,
    // and partly because some existing use of onLoad isn't assuming that it can be invoked multiple times.
}
/**
 * Called after the build is loaded and the object is added to the build list.
 * Notifies {@link RunAction2}/{@link RunAction} actions and the artifact manager.
 * A failing {@link RunAction2#onLoad} causes the offending action to be removed
 * rather than aborting the whole load.
 */
@SuppressWarnings("deprecation")
protected void onLoad() {
    for (Action a : getAllActions()) {
        if (a instanceof RunAction2) {
            try {
                ((RunAction2) a).onLoad(this);
            } catch (RuntimeException x) {
                LOGGER.log(WARNING, "failed to load " + a + " from " + getDataFile(), x);
                removeAction(a); // if possible; might be in an inconsistent state
            }
        } else if (a instanceof RunAction) {
            // legacy callback; takes no argument
            ((RunAction) a).onLoad();
        }
    }
    if (artifactManager != null) {
        artifactManager.onLoad(this);
    }
}
/**
 * Return all transient actions associated with this build.
 * Null actions produced by a factory are logged and skipped.
 *
 * @return the list can be empty but never null. read only.
 * @deprecated Use {@link #getAllActions} instead.
 */
@Deprecated
public List<Action> getTransientActions() {
    List<Action> collected = new ArrayList<Action>();
    for (TransientBuildActionFactory factory : TransientBuildActionFactory.all()) {
        for (Action action : factory.createFor(this)) {
            if (action != null) {
                collected.add(action);
            } else {
                LOGGER.log(WARNING, "null action added by {0}", factory);
            }
        }
    }
    return Collections.unmodifiableList(collected);
}
/**
 * {@inheritDoc}
 * A {@link RunAction2} is handled specially.
 * Note the instanceof order matters: a {@link RunAction2} that also implements
 * the deprecated {@link RunAction} only receives the new-style callback.
 */
@SuppressWarnings("deprecation")
@Override
public void addAction(@Nonnull Action a) {
    super.addAction(a);
    if (a instanceof RunAction2) {
        ((RunAction2) a).onAttached(this);
    } else if (a instanceof RunAction) {
        ((RunAction) a).onAttached(this);
    }
}
/**
 * Obtains 'this' in a more type safe signature.
 * The cast is safe by the self-referential generic contract of {@code Run<JobT, RunT>}.
 */
@SuppressWarnings({"unchecked"})
protected @Nonnull RunT _this() {
    return (RunT)this;
}
/**
 * Ordering based on build numbers.
 * If numbers are equal, order based on full names of parent projects.
 *
 * <p>Uses {@link Integer#compare} rather than subtraction so that the
 * comparison cannot overflow for extreme build numbers.
 *
 * @param that the run to compare against; never null
 * @return negative/zero/positive per the {@link Comparable} contract
 */
public int compareTo(@Nonnull RunT that) {
    final int byNumber = Integer.compare(this.number, that.number);
    if (byNumber != 0) {
        return byNumber;
    }
    // tie-break on the owning job so ordering is consistent across jobs
    return this.getParent().getFullName().compareTo(that.getParent().getFullName());
}
/**
 * Get the {@link Queue.Item#getId()} of the original queue item from where this Run instance
 * originated.
 * @return The queue item ID.
 * @since 1.601
 */
@Exported
public long getQueueId() {
    return queueId;
}
/**
 * Set the queue item ID.
 * <p>
 * Mapped from the {@link Queue.Item#getId()}.
 * Restricted: intended for core use only, not external plugins.
 * @param queueId The queue item ID.
 */
@Restricted(NoExternalUse.class)
public void setQueueId(long queueId) {
    this.queueId = queueId;
}
/**
 * Returns the build result.
 *
 * <p>
 * When a build is {@link #isBuilding() in progress}, this method
 * returns an intermediate result.
 * @return The status of the build, if it has completed or some build step has set a status; may be null if the build is ongoing.
 */
@Exported
public @CheckForNull Result getResult() {
    return result;
}
/**
 * Sets the {@link #getResult} of this build.
 * Has no effect when the result is already set and worse than the proposed result.
 * May only be called after the build has started and before it has moved into post-production
 * (normally meaning both {@link #isInProgress} and {@link #isBuilding} are true).
 * @param r the proposed new result
 * @throws IllegalStateException if the build has not yet started, is in post-production, or is complete
 */
public void setResult(@Nonnull Result r) {
    if (state != State.BUILDING) {
        throw new IllegalStateException("cannot change build result while in " + state);
    }
    // result can only get worse; the first result set always wins over null
    if (result==null || r.isWorseThan(result)) {
        result = r;
        // attach a stack trace at FINER so the origin of a result change can be traced
        LOGGER.log(FINE, this + " in " + getRootDir() + ": result is set to " + r, LOGGER.isLoggable(Level.FINER) ? new Exception() : null);
    }
}
/**
 * Gets the subset of {@link #getActions()} that consists of {@link BuildBadgeAction}s.
 * When the build is marked to be kept, a {@link KeepLogBuildBadge} is appended.
 */
public @Nonnull List<BuildBadgeAction> getBadgeActions() {
    List<BuildBadgeAction> badges = getActions(BuildBadgeAction.class);
    if (!isKeepLog()) {
        return badges;
    }
    List<BuildBadgeAction> withKeep = new ArrayList<>(badges);
    withKeep.add(new KeepLogBuildBadge());
    return withKeep;
}
/**
 * Returns true if the build is not completed yet.
 * This includes "not started yet" state.
 */
@Exported
public boolean isBuilding() {
    // everything strictly before POST_PRODUCTION counts as "building"
    return state == State.NOT_STARTED || state == State.BUILDING;
}
/**
 * Determine whether the run is being build right now.
 * @return true if after started and before completed.
 * @since 1.538
 */
protected boolean isInProgress() {
    switch (state) {
        case BUILDING:
        case POST_PRODUCTION:
            return true;
        default:
            return false;
    }
}
/**
 * Returns true if the log file is still being updated.
 * Only {@link State#COMPLETED} closes the log.
 */
public boolean isLogUpdated() {
    return !state.equals(State.COMPLETED);
}
/**
 * Gets the {@link Executor} building this job, if it's being built.
 * Otherwise null.
 *
 * This method looks for {@link Executor} who's {@linkplain Executor#getCurrentExecutable() assigned to this build},
 * and because of that this might not be necessarily in sync with the return value of {@link #isBuilding()} —
 * an executor holds on to {@link Run} some more time even after the build is finished (for example to
 * perform {@linkplain Run.State#POST_PRODUCTION post-production processing}.)
 * @see Executor#of
 */
@Exported
public @CheckForNull Executor getExecutor() {
    // only Queue.Executable runs are ever assigned to an executor
    return this instanceof Queue.Executable ? Executor.of((Queue.Executable) this) : null;
}
/**
 * Gets the one off {@link Executor} building this job, if it's being built.
 * Otherwise null.
 * Scans every computer's one-off executors for one whose current executable is this run.
 * NOTE(review): {@code Jenkins.getInstance()} may be null very early/late in the
 * lifecycle, which would NPE here — TODO confirm intended precondition.
 * @since 1.433
 */
public @CheckForNull Executor getOneOffExecutor() {
    for( Computer c : Jenkins.getInstance().getComputers() ) {
        for (Executor e : c.getOneOffExecutors()) {
            if(e.getCurrentExecutable()==this)
                return e;
        }
    }
    return null;
}
/**
 * Gets the charset in which the log file is written.
 * Falls back to the platform default when no charset was recorded.
 * @return never null.
 * @since 1.257
 */
public final @Nonnull Charset getCharset() {
    return charset == null ? Charset.defaultCharset() : Charset.forName(charset);
}
/**
 * Returns the {@link Cause}s that triggered a build.
 *
 * <p>
 * If a build sits in the queue for a long time, multiple build requests made during this period
 * are all rolled up into one build, hence this method may return a list.
 *
 * @return
 *      can be empty but never null. read-only.
 * @since 1.321
 */
public @Nonnull List<Cause> getCauses() {
    CauseAction causeAction = getAction(CauseAction.class);
    if (causeAction == null) {
        return Collections.<Cause>emptyList();
    }
    return Collections.unmodifiableList(causeAction.getCauses());
}
/**
 * Returns a {@link Cause} of a particular type, or null if no such cause exists.
 *
 * @since 1.362
 */
public @CheckForNull <T extends Cause> T getCause(Class<T> type) {
    for (Cause cause : getCauses()) {
        if (type.isInstance(cause)) {
            return type.cast(cause);
        }
    }
    return null;
}
/**
 * Returns true if this log file should be kept and not deleted.
 *
 * This is used as a signal to the {@link BuildDiscarder}.
 * Delegates to {@link #getWhyKeepLog()}, which subclasses may override.
 */
@Exported
public final boolean isKeepLog() {
    return getWhyKeepLog()!=null;
}
/**
 * If {@link #isKeepLog()} returns true, returns a short, human-readable
 * sentence that explains why it's being kept; null when not marked at all.
 */
public @CheckForNull String getWhyKeepLog() {
    return keepLog ? Messages.Run_MarkedExplicitly() : null;
}
/**
 * The project this build is for.
 * @return never null
 */
public @Nonnull JobT getParent() {
    return project;
}
/**
 * When the build is scheduled, as a freshly-allocated {@link Calendar}
 * (safe for the caller to mutate).
 *
 * @see #getStartTimeInMillis()
 */
@Exported
public @Nonnull Calendar getTimestamp() {
    GregorianCalendar scheduled = new GregorianCalendar();
    scheduled.setTimeInMillis(timestamp);
    return scheduled;
}
/**
 * Same as {@link #getTimestamp()} but in a different type.
 * Returns a new {@link Date}, safe for the caller to mutate.
 */
public final @Nonnull Date getTime() {
    return new Date(timestamp);
}
/**
 * Same as {@link #getTimestamp()} but in a different type, that is since the time of the epoc.
 */
public final long getTimeInMillis() {
    return timestamp;
}
/**
 * When the build has started running in an executor.
 *
 * For example, if a build is scheduled 1pm, and stayed in the queue for 1 hour (say, no idle agents),
 * then this method returns 2pm, which is the time the job moved from the queue to the building state.
 *
 * @see #getTimestamp()
 */
public final long getStartTimeInMillis() {
    // 0 means "never recorded"; approximate with the queuing time in that case
    return startTime == 0 ? timestamp : startTime;
}
/** Human-readable description of this build; may be null. */
@Exported
public String getDescription() {
    return description;
}
/**
 * Returns the length-limited description.
 * HTML-aware: characters inside tags do not count toward the limit, and
 * truncation prefers a space or a tag boundary so markup is not cut mid-tag.
 *
 * <p>Note: the previous {@code @Nonnull} annotation was wrong — this method
 * returns null whenever {@link #description} is null, so it is declared
 * {@code @CheckForNull} to match actual behavior.
 *
 * @return the description, truncated to roughly 100 displayed characters
 *         with a trailing "..." when necessary; null if there is no description.
 */
public @CheckForNull String getTruncatedDescription() {
    final int maxDescrLength = 100;
    if (description == null || description.length() < maxDescrLength) {
        return description;
    }
    final String ending = "...";
    final int sz = description.length(), maxTruncLength = maxDescrLength - ending.length();
    boolean inTag = false;
    int displayChars = 0;
    int lastTruncatablePoint = -1;
    for (int i = 0; i < sz; i++) {
        char ch = description.charAt(i);
        if (ch == '<') {
            inTag = true;
        } else if (ch == '>') {
            inTag = false;
            // a closing '>' is a safe place to cut, provided we are still under budget
            if (displayChars <= maxTruncLength) {
                lastTruncatablePoint = i + 1;
            }
        }
        if (!inTag) {
            displayChars++;
            // prefer breaking at whitespace while under budget
            if (displayChars <= maxTruncLength && ch == ' ') {
                lastTruncatablePoint = i;
            }
        }
    }
    String truncDesc = description;
    // Could not find a preferred truncatable index, force a trunc at maxTruncLength
    if (lastTruncatablePoint == -1)
        lastTruncatablePoint = maxTruncLength;
    if (displayChars >= maxDescrLength) {
        truncDesc = truncDesc.substring(0, lastTruncatablePoint) + ending;
    }
    return truncDesc;
}
/**
 * Gets the string that says how long since this build has started.
 *
 * @return
 *      string like "3 minutes" "1 day" etc.
 */
public @Nonnull String getTimestampString() {
    long elapsed = System.currentTimeMillis() - timestamp;
    return Util.getPastTimeString(elapsed);
}
/**
 * Returns the timestamp formatted in xs:dateTime.
 * NOTE(review): assumes {@code Util.XS_DATETIME_FORMATTER} is safe for
 * concurrent use — verify against its declaration.
 */
public @Nonnull String getTimestampString2() {
    return Util.XS_DATETIME_FORMATTER.format(new Date(timestamp));
}
/**
 * Gets the string that says how long the build took to run.
 * While the build is in progress this reports the elapsed time so far;
 * before it starts, a "not started yet" message.
 */
public @Nonnull String getDurationString() {
    if (hasntStartedYet()) {
        return Messages.Run_NotStartedYet();
    } else if (isBuilding()) {
        return Messages.Run_InProgressDuration(
                Util.getTimeSpanString(System.currentTimeMillis()-startTime));
    }
    return Util.getTimeSpanString(duration);
}
/**
 * Gets the millisecond it took to build.
 * Zero until the build has completed.
 */
@Exported
public long getDuration() {
    return duration;
}
/**
 * Gets the icon color for display.
 *
 * <p>For a completed build this is the color of its {@link Result}. The
 * result may, in anomalous cases, still be null for a completed build
 * (see {@link #getPreviousBuiltBuild}); fall back to
 * {@link BallColor#NOTBUILT} instead of throwing NPE.
 * For an in-progress build, the previous build's color is animated.
 */
public @Nonnull BallColor getIconColor() {
    if (!isBuilding()) {
        // already built; guard against a missing result (e.g. aborted builds)
        Result completedResult = getResult();
        return completedResult != null ? completedResult.color : BallColor.NOTBUILT;
    }
    // a new build is in progress; animate the previous build's color
    BallColor baseColor;
    RunT pb = getPreviousBuild();
    if (pb == null)
        baseColor = BallColor.NOTBUILT;
    else
        baseColor = pb.getIconColor();
    return baseColor.anime();
}
/**
 * Returns true if the build is still queued and hasn't started yet.
 */
public boolean hasntStartedYet() {
    return State.NOT_STARTED == state;
}
/** Returns "job-full-name #number", e.g. {@code "folder/job #42"}. */
@Override
public String toString() {
    return project.getFullName() + " #" + number;
}
/** Display name of the job followed by this build's display name. */
@Exported
public String getFullDisplayName() {
    return project.getFullDisplayName()+' '+getDisplayName();
}
/** Custom display name if one was set, otherwise the default "#NNN". */
@Exported
public String getDisplayName() {
    if (displayName != null) {
        return displayName;
    }
    return "#" + number;
}
/** True when {@link #setDisplayName} assigned a non-null custom name. */
public boolean hasCustomDisplayName() {
    return displayName!=null;
}
/**
 * Sets the custom display name, persisting the change immediately.
 * Requires {@code UPDATE} permission.
 * @param value
 *      Set to null to revert back to the default "#NNN".
 * @throws IOException if saving the build record fails
 */
public void setDisplayName(String value) throws IOException {
    checkPermission(UPDATE);
    this.displayName = value;
    save();
}
/** The build number, unique within the owning job. */
@Exported(visibility=2)
public int getNumber() {
    return number;
}
/**
 * Called by {@link RunMap} to obtain a reference to this run.
 * @return Reference to the build. Never null
 * @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#createReference
 * @since 1.556
 */
protected @Nonnull BuildReference<RunT> createReference() {
    return new BuildReference<RunT>(getId(), _this());
}
/**
 * Called by {@link RunMap} to drop bi-directional links in preparation for
 * deleting a build.
 * Splices this run out of the doubly-linked previous/next chain.
 * @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#dropLinks
 * @since 1.556
 */
protected void dropLinks() {
    if(nextBuild!=null)
        nextBuild.previousBuild = previousBuild;
    if(previousBuild!=null)
        previousBuild.nextBuild = nextBuild;
}
/**
 * The build immediately preceding this one, or null if this is the oldest.
 * @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#getPreviousBuild
 */
public @CheckForNull RunT getPreviousBuild() {
    return previousBuild;
}
/**
 * Gets the most recent {@linkplain #isBuilding() completed} build excluding 'this' Run itself.
 * Returns null if no completed build precedes this one.
 */
public final @CheckForNull RunT getPreviousCompletedBuild() {
    for (RunT candidate = getPreviousBuild(); candidate != null; candidate = candidate.getPreviousBuild()) {
        if (!candidate.isBuilding()) {
            return candidate;
        }
    }
    return null;
}
/**
 * Obtains the next younger build in progress. It uses a skip-pointer so that we can compute this without
 * O(n) computation time. This method also fixes up the skip list as we go, in a way that's concurrency safe.
 *
 * <p>
 * We basically follow the existing skip list, and wherever we find a non-optimal pointer, we remember them
 * in 'fixUp' and update them later.
 *
 * <p>Invariant: a run whose {@code previousBuildInProgress} points to itself is a
 * sentinel meaning "no in-progress build exists at or before this point".
 */
public final @CheckForNull RunT getPreviousBuildInProgress() {
    if(previousBuildInProgress==this)   return null;    // the most common case
    List<RunT> fixUp = new ArrayList<RunT>();
    RunT r = _this(); // 'r' is the source of the pointer (so that we can add it to fix up if we find that the target of the pointer is inefficient.)
    RunT answer;
    while (true) {
        RunT n = r.previousBuildInProgress;
        if (n==null) {// no field computed yet.
            n=r.getPreviousBuild();
            fixUp.add(r);
        }
        if (r==n || n==null) {
            // this indicates that we know there's no build in progress beyond this point
            answer = null;
            break;
        }
        if (n.isBuilding()) {
            // we now know 'n' is the target we wanted
            answer = n;
            break;
        }
        fixUp.add(r);   // r contains the stale 'previousBuildInProgress' back pointer
        r = n;
    }
    // fix up so that the next look up will run faster
    for (RunT f : fixUp)
        f.previousBuildInProgress = answer==null ? f : answer; // self-pointer encodes "none"
    return answer;
}
/**
 * Returns the last build that was actually built - i.e., skipping any with Result.NOT_BUILT
 * @return the most recent preceding built build, or null if none exists
 */
public @CheckForNull RunT getPreviousBuiltBuild() {
    RunT r=getPreviousBuild();
    // in certain situations (aborted m2 builds) r.getResult() can still be null, although it should theoretically never happen
    while( r!=null && (r.getResult() == null || r.getResult()==Result.NOT_BUILT) )
        r=r.getPreviousBuild();
    return r;
}
/**
 * Returns the last build that didn't fail before this build,
 * or null if every preceding build failed (or none exists).
 */
public @CheckForNull RunT getPreviousNotFailedBuild() {
    for (RunT b = getPreviousBuild(); b != null; b = b.getPreviousBuild()) {
        if (b.getResult() != Result.FAILURE) {
            return b;
        }
    }
    return null;
}
/**
 * Returns the last failed build before this build,
 * or null if no preceding build failed.
 */
public @CheckForNull RunT getPreviousFailedBuild() {
    for (RunT b = getPreviousBuild(); b != null; b = b.getPreviousBuild()) {
        if (b.getResult() == Result.FAILURE) {
            return b;
        }
    }
    return null;
}
/**
 * Returns the last successful build before this build,
 * or null if no preceding build succeeded.
 * @since 1.383
 */
public @CheckForNull RunT getPreviousSuccessfulBuild() {
    for (RunT b = getPreviousBuild(); b != null; b = b.getPreviousBuild()) {
        if (b.getResult() == Result.SUCCESS) {
            return b;
        }
    }
    return null;
}
/**
 * Returns the last {@code numberOfBuilds} builds with a build result ≥ {@code threshold}.
 * Builds that are still running, or whose result is null, are skipped.
 *
 * @param numberOfBuilds the desired number of builds
 * @param threshold the build result threshold
 * @return a list with the builds (youngest build first).
 *   May be smaller than 'numberOfBuilds' or even empty
 *   if not enough builds satisfying the threshold have been found. Never null.
 * @since 1.383
 */
public @Nonnull List<RunT> getPreviousBuildsOverThreshold(int numberOfBuilds, @Nonnull Result threshold) {
    List<RunT> builds = new ArrayList<RunT>(numberOfBuilds);
    RunT r = getPreviousBuild();
    while (r != null && builds.size() < numberOfBuilds) {
        if (!r.isBuilding() &&
             (r.getResult() != null && r.getResult().isBetterOrEqualTo(threshold))) {
            builds.add(r);
        }
        r = r.getPreviousBuild();
    }
    return builds;
}
/**
 * The build immediately following this one, or null if this is the newest.
 * @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#getNextBuild
 */
public @CheckForNull RunT getNextBuild() {
    return nextBuild;
}
/**
 * Returns the URL of this {@link Run}, relative to the context root of Hudson.
 *
 * @return
 *      String like "job/foo/32/" with trailing slash but no leading slash.
 */
// I really messed this up. I'm hoping to fix this some time
// it shouldn't have trailing '/', and instead it should have leading '/'
public @Nonnull String getUrl() {
    // RUN may be accessed using permalinks, as "/lastSuccessful" or other, so try to retrieve this base URL
    // looking for "this" in the current request ancestors
    // @see also {@link AbstractItem#getUrl}
    StaplerRequest req = Stapler.getCurrentRequest();
    if (req != null) {
        String seed = Functions.getNearestAncestorUrl(req,this);
        if(seed!=null) {
            // trim off the context path portion and leading '/', but add trailing '/'
            return seed.substring(req.getContextPath().length()+1)+'/';
        }
    }
    // no request in scope (e.g. background thread): build the canonical URL
    return project.getUrl()+getNumber()+'/';
}
/**
 * Obtains the absolute URL to this build.
 *
 * @deprecated
 *      This method shall <b>NEVER</b> be used during HTML page rendering, as it's too easy for
 *      misconfiguration to break this value, with network set up like Apache reverse proxy.
 *      This method is only intended for the remote API clients who cannot resolve relative references.
 */
@Exported(visibility=2,name="url")
@Deprecated
public final @Nonnull String getAbsoluteUrl() {
    return project.getAbsoluteUrl()+getNumber()+'/';
}
/** URL fragment used by the search index: the build number with a trailing slash. */
public final @Nonnull String getSearchUrl() {
    return getNumber()+"/";
}
/**
 * Unique ID of this build.
 * Usually the decimal form of {@link #number}, but may be a formatted timestamp for historical builds.
 */
@Exported
public @Nonnull String getId() {
    if (id != null) {
        return id;
    }
    return Integer.toString(number);
}
/** Looks up a descriptor by class name via the Jenkins singleton; may return null. */
@Override
public @CheckForNull Descriptor getDescriptorByName(String className) {
    return Jenkins.getInstance().getDescriptorByName(className);
}
/**
 * Get the root directory of this {@link Run} on the master.
 * Files related to this {@link Run} should be stored below this directory.
 * The directory name is the decimal build number under the job's build dir.
 * @return Root directory of this {@link Run} on the master. Never null
 */
@Override
public @Nonnull File getRootDir() {
    return new File(project.getBuildDir(), Integer.toString(number));
}
/**
 * Gets an object responsible for storing and retrieving build artifacts.
 * If {@link #pickArtifactManager} has previously been called on this build,
 * and a nondefault manager selected, that will be returned.
 * Otherwise (including if we are loading a historical build created prior to this feature) {@link StandardArtifactManager} is used.
 * <p>This method should be used when existing artifacts are to be loaded, displayed, or removed.
 * If adding artifacts, use {@link #pickArtifactManager} instead.
 * @return an appropriate artifact manager
 * @since 1.532
 */
public final @Nonnull ArtifactManager getArtifactManager() {
    if (artifactManager != null) {
        return artifactManager;
    }
    return new StandardArtifactManager(this);
}
/**
 * Selects an object responsible for storing and retrieving build artifacts.
 * The first time this is called on a running build, {@link ArtifactManagerConfiguration} is checked
 * to see if one will handle this build.
 * If so, that manager is saved in the build and it will be used henceforth.
 * If no manager claimed the build, {@link StandardArtifactManager} is used.
 * <p>This method should be used when a build step expects to archive some artifacts.
 * If only displaying existing artifacts, use {@link #getArtifactManager} instead.
 * Synchronized so concurrent callers agree on a single manager.
 * @return an appropriate artifact manager
 * @throws IOException if a custom manager was selected but the selection could not be saved
 * @since 1.532
 */
public final synchronized @Nonnull ArtifactManager pickArtifactManager() throws IOException {
    if (artifactManager != null) {
        return artifactManager;
    } else {
        // first factory to claim this build wins, and the choice is persisted
        for (ArtifactManagerFactory f : ArtifactManagerConfiguration.get().getArtifactManagerFactories()) {
            ArtifactManager mgr = f.managerFor(this);
            if (mgr != null) {
                artifactManager = mgr;
                save();
                return mgr;
            }
        }
        // no factory claimed the build; fall back to the default (not persisted)
        return new StandardArtifactManager(this);
    }
}
/**
 * Gets the directory where the artifacts are archived ({@code <root>/archive}).
 * @deprecated Should only be used from {@link StandardArtifactManager} or subclasses.
 */
@Deprecated
public File getArtifactsDir() {
    return new File(getRootDir(),"archive");
}
/**
 * Gets the artifacts (relative to {@link #getArtifactsDir()}.
 * @return The list can be empty but never null
 */
@Exported
public @Nonnull List<Artifact> getArtifacts() {
    return getArtifactsUpTo(Integer.MAX_VALUE);
}
/**
 * Gets the first N artifacts.
 * I/O failures while listing are logged and yield a (possibly partial) list.
 * @param artifactsNumber maximum number of file artifacts to collect
 * @return The list can be empty but never null
 */
public @Nonnull List<Artifact> getArtifactsUpTo(int artifactsNumber) {
    ArtifactList r = new ArtifactList();
    try {
        addArtifacts(getArtifactManager().root(), "", "", r, null, artifactsNumber);
    } catch (IOException x) {
        LOGGER.log(Level.WARNING, null, x);
    }
    r.computeDisplayName();
    return r;
}
/**
 * Check if the {@link Run} contains artifacts.
 * The strange method name is so that we can access it from EL.
 * @return true if this run has any artifacts
 */
public boolean getHasArtifacts() {
    List<Artifact> firstArtifact = getArtifactsUpTo(1);
    return !firstArtifact.isEmpty();
}
/**
 * Recursively collects artifacts under {@code dir} into {@code r}, up to {@code upTo} files.
 *
 * <p>Fix: {@code parent} was annotated {@code @Nonnull}, but the caller
 * ({@link #getArtifactsUpTo}) passes {@code null} for the root level and the
 * body itself checks {@code parent != null} — the annotation is corrected to
 * {@code @CheckForNull}.
 *
 * @param dir      directory to scan
 * @param path     accumulated relative path (with trailing '/' for non-root levels)
 * @param pathHref accumulated URL-encoded path
 * @param r        output list; also records tree-view parent/child structure
 * @param parent   tree-view parent node, or null at the root level
 * @param upTo     remaining budget of file artifacts to add
 * @return number of file artifacts added at this level and below
 */
private int addArtifacts(@Nonnull VirtualFile dir,
        @Nonnull String path, @Nonnull String pathHref,
        @Nonnull ArtifactList r, @CheckForNull Artifact parent, int upTo) throws IOException {
    VirtualFile[] kids = dir.list();
    Arrays.sort(kids);
    int n = 0;
    for (VirtualFile sub : kids) {
        String child = sub.getName();
        String childPath = path + child;
        String childHref = pathHref + Util.rawEncode(child);
        String length = sub.isFile() ? String.valueOf(sub.length()) : "";
        boolean collapsed = (kids.length==1 && parent!=null);
        Artifact a;
        if (collapsed) {
            // Collapse single items into parent node where possible:
            a = new Artifact(parent.getFileName() + '/' + child, childPath,
                             sub.isDirectory() ? null : childHref, length,
                             parent.getTreeNodeId());
            r.tree.put(a, r.tree.remove(parent));
        } else {
            // Use null href for a directory:
            a = new Artifact(child, childPath,
                             sub.isDirectory() ? null : childHref, length,
                             "n" + ++r.idSeq);
            r.tree.put(a, parent!=null ? parent.getTreeNodeId() : null);
        }
        if (sub.isDirectory()) {
            n += addArtifacts(sub, childPath + '/', childHref + '/', r, a, upTo-n);
            if (n>=upTo) break;
        } else {
            // Don't store collapsed path in ArrayList (for correct data in external API)
            r.add(collapsed ? new Artifact(child, a.relativePath, a.href, length, a.treeNodeId) : a);
            if (++n>=upTo) break;
        }
    }
    return n;
}
/**
 * Maximum number of artifacts to list before using switching to the tree view.
 * Overridable via the {@code hudson.model.Run.ArtifactList.listCutoff} system property.
 */
public static final int LIST_CUTOFF = Integer.parseInt(SystemProperties.getString("hudson.model.Run.ArtifactList.listCutoff", "16"));
/**
 * Maximum number of artifacts to show in tree view before just showing a link.
 * Overridable via the {@code hudson.model.Run.ArtifactList.treeCutoff} system property.
 */
public static final int TREE_CUTOFF = Integer.parseInt(SystemProperties.getString("hudson.model.Run.ArtifactList.treeCutoff", "40"));
// ..and then "too many"
/**
 * List of file {@link Artifact}s plus the parent/child structure needed for the
 * tree view. {@link #computeDisplayName} assigns each artifact the shortest
 * path suffix that disambiguates it from the others.
 */
public final class ArtifactList extends ArrayList<Artifact> {
    private static final long serialVersionUID = 1L;
    /**
     * Map of Artifact to treeNodeId of parent node in tree view.
     * Contains Artifact objects for directories and files (the ArrayList contains only files).
     */
    private LinkedHashMap<Artifact,String> tree = new LinkedHashMap<Artifact,String>();
    // sequence for generating unique tree node ids ("n1", "n2", ...)
    private int idSeq = 0;
    public Map<Artifact,String> getTree() {
        return tree;
    }
    /**
     * Computes {@link Artifact#displayPath} for every artifact: iteratively
     * grows each artifact's visible path suffix until no two artifacts share
     * the same display name (or maxDepth is reached).
     */
    public void computeDisplayName() {
        if(size()>LIST_CUTOFF)   return; // we are not going to display file names, so no point in computing this
        int maxDepth = 0;
        int[] len = new int[size()];
        String[][] tokens = new String[size()][];
        for( int i=0; i<tokens.length; i++ ) {
            tokens[i] = get(i).relativePath.split("[\\\\/]+");
            maxDepth = Math.max(maxDepth,tokens[i].length);
            len[i] = 1;
        }
        boolean collision;
        int depth=0;
        do {
            collision = false;
            Map<String,Integer/*index*/> names = new HashMap<String,Integer>();
            for (int i = 0; i < tokens.length; i++) {
                String[] token = tokens[i];
                String displayName = combineLast(token,len[i]);
                Integer j = names.put(displayName, i);
                if(j!=null) {
                    collision = true;
                    if(j>=0)
                        len[j]++;
                    len[i]++;
                    names.put(displayName,-1);  // occupy this name but don't let len[i] incremented with additional collisions
                }
            }
        } while(collision && depth++<maxDepth);
        for (int i = 0; i < tokens.length; i++)
            get(i).displayPath = combineLast(tokens[i],len[i]);
//        OUTER:
//        for( int n=1; n<maxLen; n++ ) {
//            // if we just display the last n token, would it be suffice for disambiguation?
//            Set<String> names = new HashSet<String>();
//            for (String[] token : tokens) {
//                if(!names.add(combineLast(token,n)))
//                    continue OUTER; // collision. Increase n and try again
//            }
//
//            // this n successfully disambiguates
//            for (int i = 0; i < tokens.length; i++) {
//                String[] token = tokens[i];
//                get(i).displayPath = combineLast(token,n);
//            }
//            return;
//        }
//        // it's impossible to get here, as that means
//        // we have the same artifacts archived twice, but be defensive
//        for (Artifact a : this)
//            a.displayPath = a.relativePath;
    }
    /**
     * Combines last N token into the "a/b/c" form.
     */
    private String combineLast(String[] token, int n) {
        StringBuilder buf = new StringBuilder();
        for( int i=Math.max(0,token.length-n); i<token.length; i++ ) {
            if(buf.length()>0)  buf.append('/');
            buf.append(token[i]);
        }
        return buf.toString();
    }
}
/**
* A build artifact.
*/
@ExportedBean
public class Artifact {
/**
* Relative path name from artifacts root.
*/
@Exported(visibility=3)
public final String relativePath;
/**
* Truncated form of {@link #relativePath} just enough
* to disambiguate {@link Artifact}s.
*/
/*package*/ String displayPath;
/**
* The filename of the artifact.
* (though when directories with single items are collapsed for tree view, name may
* include multiple path components, like "dist/pkg/mypkg")
*/
private String name;
/**
* Properly encoded relativePath for use in URLs. This field is null for directories.
*/
private String href;
/**
* Id of this node for use in tree view.
*/
private String treeNodeId;
/**
*length of this artifact for files.
*/
private String length;
/*package for test*/ Artifact(String name, String relativePath, String href, String len, String treeNodeId) {
this.name = name;
this.relativePath = relativePath;
this.href = href;
this.treeNodeId = treeNodeId;
this.length = len;
}
/**
* Gets the artifact file.
* @deprecated May not be meaningful with custom artifact managers. Use {@link ArtifactManager#root} plus {@link VirtualFile#child} with {@link #relativePath} instead.
*/
@Deprecated
public @Nonnull File getFile() {
return new File(getArtifactsDir(),relativePath);
}
/**
* Returns just the file name portion, without the path.
*/
@Exported(visibility=3)
public String getFileName() {
return name;
}
@Exported(visibility=3)
public String getDisplayPath() {
return displayPath;
}
public String getHref() {
return href;
}
public String getLength() {
return length;
}
public long getFileSize(){
return Long.decode(length);
}
public String getTreeNodeId() {
return treeNodeId;
}
@Override
public String toString() {
return relativePath;
}
}
/**
 * Returns the log file, preferring the plain "log" file, then "log.gz".
 * @return The file may reference both uncompressed or compressed logs;
 *         if neither exists, the (nonexistent) uncompressed path is returned.
 */
public @Nonnull File getLogFile() {
    File plain = new File(getRootDir(), "log");
    if (plain.isFile()) {
        return plain;
    }
    File compressed = new File(getRootDir(), "log.gz");
    if (compressed.isFile()) {
        return compressed;
    }
    // If both fail, return the standard, uncompressed log file
    return plain;
}
/**
 * Returns an input stream that reads from the log file.
 * It will use a gzip-compressed log file (log.gz) if that exists.
 *
 * @throws IOException if the file cannot be opened (including invalid paths)
 * @return An input stream from the log file.
 *   If the log file does not exist, the error message will be returned to the output.
 * @since 1.349
 */
public @Nonnull InputStream getLogInputStream() throws IOException {
    File logFile = getLogFile();
    if (logFile.exists() ) {
        // Checking if a ".gz" file was return
        try {
            InputStream fis = Files.newInputStream(logFile.toPath());
            if (logFile.getName().endsWith(".gz")) {
                return new GZIPInputStream(fis);
            } else {
                return fis;
            }
        } catch (InvalidPathException e) {
            // surface path problems as IOException per the method contract
            throw new IOException(e);
        }
    }
    // no log on disk: hand back the error message as the "log" content
    String message = "No such file: " + logFile;
    return new ByteArrayInputStream(charset != null ? message.getBytes(charset) : message.getBytes());
}
/** Character reader over {@link #getLogInputStream}, honoring the recorded charset if any. */
public @Nonnull Reader getLogReader() throws IOException {
    InputStream log = getLogInputStream();
    return charset == null ? new InputStreamReader(log) : new InputStreamReader(log, charset);
}
/**
 * Used from <tt>console.jelly</tt> to write annotated log to the given output.
 *
 * @param offset byte offset into the log at which to start writing
 * @since 1.349
 */
public void writeLogTo(long offset, @Nonnull XMLOutput out) throws IOException {
    try {
        getLogText().writeHtmlTo(offset,out.asWriter());
    } catch (IOException e) {
        // try to fall back to the old getLogInputStream()
        // mainly to support .gz compressed files
        // In this case, console annotation handling will be turned off.
        try (InputStream input = getLogInputStream()) {
            IOUtils.copy(input, out.asWriter());
        }
    }
}
/**
 * Writes the complete log from the start to finish to the {@link OutputStream}.
 *
 * If someone is still writing to the log, this method will not return until the whole log
 * file gets written out.
 * <p>
 * The method does not close the {@link OutputStream}.
 *
 * @throws InterruptedException if interrupted while waiting for more log output
 */
public void writeWholeLogTo(@Nonnull OutputStream out) throws IOException, InterruptedException {
    long pos = 0;
    AnnotatedLargeText logText;
    logText = getLogText();
    pos = logText.writeLogTo(pos, out);
    // poll until the log is marked complete, copying any newly appended bytes
    while (!logText.isComplete()) {
        // Instead of us hitting the log file as many times as possible, instead we get the information once every
        // second to avoid CPU usage getting very high.
        Thread.sleep(1000);
        logText = getLogText();
        pos = logText.writeLogTo(pos, out);
    }
}
/**
 * Used to URL-bind {@link AnnotatedLargeText}.
 * @return A {@link Run} log with annotations; marked complete once the log is closed
 */
public @Nonnull AnnotatedLargeText getLogText() {
    return new AnnotatedLargeText(getLogFile(),getCharset(),!isLogUpdated(),this);
}
/**
 * Builds the search index for this run: the console, the changes page,
 * and any action that contributes a visible icon.
 */
@Override
protected @Nonnull SearchIndexBuilder makeSearchIndex() {
    SearchIndexBuilder index = super.makeSearchIndex().add("console").add("changes");
    for (Action action : getAllActions()) {
        if (action.getIconFileName() != null) {
            index.add(action.getUrlName());
        }
    }
    return index;
}
/** Remote API entry point for this run. */
public @Nonnull Api getApi() {
    return new Api(this);
}
/** Checks the given permission against {@link #getACL}, throwing if denied. */
@Override
public void checkPermission(@Nonnull Permission p) {
    getACL().checkPermission(p);
}
/** Tests the given permission against {@link #getACL}. */
@Override
public boolean hasPermission(@Nonnull Permission p) {
    return getACL().hasPermission(p);
}
@Override
public ACL getACL() {
    // for now, don't maintain ACL per run, and do it at project level
    return getParent().getACL();
}
    /**
     * Deletes this build's artifacts.
     *
     * @throws IOException
     *      if we fail to delete.
     *
     * @since 1.350
     */
    public synchronized void deleteArtifacts() throws IOException {
        try {
            getArtifactManager().delete();
        } catch (InterruptedException x) {
            // preserve the interruption as the cause of the I/O failure
            throw new IOException(x);
        }
    }
    /**
     * Deletes this build and its entire log
     *
     * @throws IOException
     *      if we fail to delete.
     */
    public void delete() throws IOException {
        File rootDir = getRootDir();
        if (!rootDir.isDirectory()) {
            throw new IOException(this + ": " + rootDir + " looks to have already been deleted; siblings: " + Arrays.toString(project.getBuildDir().list()));
        }
        RunListener.fireDeleted(this);
        synchronized (this) { // avoid holding a lock while calling plugin impls of onDeleted
            // Rename the build directory to a dot-prefixed temporary name before deleting,
            // so concurrent readers never observe a half-deleted directory under the real name.
            File tmp = new File(rootDir.getParentFile(),'.'+rootDir.getName());
            if (tmp.exists()) {
                Util.deleteRecursive(tmp);
            }
            // TODO on Java 7 prefer: Files.move(rootDir.toPath(), tmp.toPath(), StandardCopyOption.ATOMIC_MOVE)
            boolean renamingSucceeded = rootDir.renameTo(tmp);
            Util.deleteRecursive(tmp);
            // some user reported that they see some left-over .xyz files in the workspace,
            // so just to make sure we've really deleted it, schedule the deletion on VM exit, too.
            if(tmp.exists())
                tmp.deleteOnExit();
            if(!renamingSucceeded)
                throw new IOException(rootDir+" is in use");
            LOGGER.log(FINE, "{0}: {1} successfully deleted", new Object[] {this, rootDir});
            removeRunFromParent();
        }
    }
    @SuppressWarnings("unchecked") // seems this is too clever for Java's type system?
    private void removeRunFromParent() {
        getParent().removeRun((RunT)this);
    }
/**
* @see CheckPoint#report()
*/
/*package*/ static void reportCheckpoint(@Nonnull CheckPoint id) {
Run<?,?>.RunExecution exec = RunnerStack.INSTANCE.peek();
if (exec == null) {
return;
}
exec.checkpoints.report(id);
}
    /**
     * Blocks the calling (build) thread until the previous build in progress, if any,
     * has either passed the given checkpoint or finished without reaching it.
     * @param listener where to announce the wait, used only when {@code waiter} is also given
     * @param waiter display name of the build doing the waiting, for the log message
     * @see CheckPoint#block()
     */
    /*package*/ static void waitForCheckpoint(@Nonnull CheckPoint id, @CheckForNull BuildListener listener, @CheckForNull String waiter) throws InterruptedException {
        while(true) {
            Run<?,?>.RunExecution exec = RunnerStack.INSTANCE.peek();
            if (exec == null) {
                // not inside a build execution; nothing to wait for
                return;
            }
            Run b = exec.getBuild().getPreviousBuildInProgress();
            if(b==null) return; // no pending earlier build
            Run.RunExecution runner = b.runner;
            if(runner==null) {
                // polled at the wrong moment. try again.
                Thread.sleep(0);
                continue;
            }
            if(runner.checkpoints.waitForCheckPoint(id, listener, waiter))
                return; // confirmed that the previous build reached the check point
            // the previous build finished without ever reaching the check point. try again.
        }
    }
    /**
     * @deprecated as of 1.467
     *      Please use {@link RunExecution}
     */
    @Deprecated
    protected abstract class Runner extends RunExecution {}
    /**
     * Object that lives while the build is executed, to keep track of things that
     * are needed only during the build.
     */
    public abstract class RunExecution {
        /**
         * Keeps track of the check points attained by a build, and abstracts away the synchronization needed to
         * maintain this data structure.
         */
        private final class CheckpointSet {
            /**
             * Stages of the builds that this runner has completed. This is used for concurrent {@link RunExecution}s to
             * coordinate and serialize their executions where necessary.
             */
            private final Set<CheckPoint> checkpoints = new HashSet<CheckPoint>();
            // set once the build fully completes; releases every waiter in waitForCheckPoint
            private boolean allDone;
            protected synchronized void report(@Nonnull CheckPoint identifier) {
                checkpoints.add(identifier);
                notifyAll();
            }
            protected synchronized boolean waitForCheckPoint(@Nonnull CheckPoint identifier, @CheckForNull BuildListener listener, @CheckForNull String waiter) throws InterruptedException {
                final Thread t = Thread.currentThread();
                final String oldName = t.getName();
                // temporarily rename the thread so thread dumps show what we are blocked on
                t.setName(oldName + " : waiting for " + identifier + " on " + getFullDisplayName() + " from " + waiter);
                try {
                    boolean first = true;
                    while (!allDone && !checkpoints.contains(identifier)) {
                        // announce the wait only once, and only if we know who is waiting
                        if (first && listener != null && waiter != null) {
                            listener.getLogger().println(Messages.Run__is_waiting_for_a_checkpoint_on_(waiter, getFullDisplayName()));
                        }
                        wait();
                        first = false;
                    }
                    // true if the checkpoint was reached, false if the build finished without it
                    return checkpoints.contains(identifier);
                } finally {
                    t.setName(oldName);
                }
            }
            /**
             * Notifies that the build is fully completed and all the checkpoint locks be released.
             */
            private synchronized void allDone() {
                allDone = true;
                notifyAll();
            }
        }
        private final CheckpointSet checkpoints = new CheckpointSet();
        // scratch space for plugins during the build; see getAttributes(), never persisted
        private final Map<Object,Object> attributes = new HashMap<Object, Object>();
        /**
         * Performs the main build and returns the status code.
         *
         * @throws Exception
         *      exception will be recorded and the build will be considered a failure.
         */
        public abstract @Nonnull Result run(@Nonnull BuildListener listener ) throws Exception, RunnerAbortedException;
        /**
         * Performs the post-build action.
         * <p>
         * This method is called after {@linkplain #run(BuildListener) the main portion of the build is completed.}
         * This is a good opportunity to do notifications based on the result
         * of the build. When this method is called, the build is not really
         * finalized yet, and the build is still considered in progress --- for example,
         * even if the build is successful, this build still won't be picked up
         * by {@link Job#getLastSuccessfulBuild()}.
         */
        public abstract void post(@Nonnull BuildListener listener ) throws Exception;
        /**
         * Performs final clean up action.
         * <p>
         * This method is called after {@link #post(BuildListener)},
         * after the build result is fully finalized. This is the point
         * where the build is already considered completed.
         * <p>
         * Among other things, this is often a necessary pre-condition
         * before invoking other builds that depend on this build.
         */
        public abstract void cleanUp(@Nonnull BuildListener listener) throws Exception;
        public @Nonnull RunT getBuild() {
            return _this();
        }
        public @Nonnull JobT getProject() {
            return _this().getParent();
        }
        /**
         * Bag of stuff to allow plugins to store state for the duration of a build
         * without persisting it.
         *
         * @since 1.473
         */
        public @Nonnull Map<Object,Object> getAttributes() {
            return attributes;
        }
    }
    /**
     * Used in {@link Run.RunExecution#run} to indicates that a fatal error in a build
     * is reported to {@link BuildListener} and the build should be simply aborted
     * without further recording a stack trace.
     */
    public static final class RunnerAbortedException extends RuntimeException {
        private static final long serialVersionUID = 1L;
    }
    /**
     * @deprecated as of 1.467
     *      Use {@link #execute(hudson.model.Run.RunExecution)}
     */
    @Deprecated
    protected final void run(@Nonnull Runner job) {
        execute(job);
    }
    /**
     * Drives the full lifecycle of this build through the given {@link RunExecution}:
     * main build, post-build steps, clean-up, persistence, and log rotation.
     * No-op if this build already has a result.
     */
    protected final void execute(@Nonnull RunExecution job) {
        if(result!=null)
            return; // already built.
        StreamBuildListener listener=null;
        runner = job;
        onStartBuilding();
        try {
            // to set the state to COMPLETE in the end, even if the thread dies abnormally.
            // otherwise the queue state becomes inconsistent
            long start = System.currentTimeMillis();
            try {
                try {
                    Computer computer = Computer.currentComputer();
                    Charset charset = null;
                    if (computer != null) {
                        charset = computer.getDefaultCharset();
                        this.charset = charset.name();
                    }
                    listener = createBuildListener(job, listener, charset);
                    listener.started(getCauses());
                    Authentication auth = Jenkins.getAuthentication();
                    if (!auth.equals(ACL.SYSTEM)) {
                        // record who triggered the build, hyperlinked when a real user
                        String name = auth.getName();
                        if (!auth.equals(Jenkins.ANONYMOUS)) {
                            name = ModelHyperlinkNote.encodeTo(User.get(name));
                        }
                        listener.getLogger().println(Messages.Run_running_as_(name));
                    }
                    RunListener.fireStarted(this,listener);
                    updateSymlinks(listener);
                    setResult(job.run(listener));
                    LOGGER.log(INFO, "{0} main build action completed: {1}", new Object[] {this, result});
                    CheckPoint.MAIN_COMPLETED.report();
                } catch (ThreadDeath t) {
                    throw t;
                } catch( AbortException e ) {// orderly abortion.
                    result = Result.FAILURE;
                    listener.error(e.getMessage());
                    LOGGER.log(FINE, "Build "+this+" aborted",e);
                } catch( RunnerAbortedException e ) {// orderly abortion.
                    result = Result.FAILURE;
                    LOGGER.log(FINE, "Build "+this+" aborted",e);
                } catch( InterruptedException e) {
                    // aborted
                    result = Executor.currentExecutor().abortResult();
                    listener.getLogger().println(Messages.Run_BuildAborted());
                    Executor.currentExecutor().recordCauseOfInterruption(Run.this,listener);
                    LOGGER.log(Level.INFO, this + " aborted", e);
                } catch( Throwable e ) {
                    handleFatalBuildProblem(listener,e);
                    result = Result.FAILURE;
                }
                // even if the main build fails fatally, try to run post build processing
                job.post(listener);
            } catch (ThreadDeath t) {
                throw t;
            } catch( Throwable e ) {
                // NOTE: listener may still be null here if createBuildListener failed
                handleFatalBuildProblem(listener,e);
                result = Result.FAILURE;
            } finally {
                long end = System.currentTimeMillis();
                duration = Math.max(end - start, 0); // @see HUDSON-5844
                // advance the state.
                // the significance of doing this is that Jenkins
                // will now see this build as completed.
                // things like triggering other builds requires this as pre-condition.
                // see issue #980.
                LOGGER.log(FINER, "moving into POST_PRODUCTION on {0}", this);
                state = State.POST_PRODUCTION;
                if (listener != null) {
                    RunListener.fireCompleted(this,listener);
                    try {
                        job.cleanUp(listener);
                    } catch (Exception e) {
                        handleFatalBuildProblem(listener,e);
                        // too late to update the result now
                    }
                    listener.finished(result);
                    listener.closeQuietly();
                }
                try {
                    save();
                } catch (IOException e) {
                    LOGGER.log(Level.SEVERE, "Failed to save build record",e);
                }
            }
            try {
                getParent().logRotate();
            } catch (Exception e) {
                LOGGER.log(Level.SEVERE, "Failed to rotate log",e);
            }
        } finally {
            onEndBuilding();
        }
    }
private StreamBuildListener createBuildListener(@Nonnull RunExecution job, StreamBuildListener listener, Charset charset) throws IOException, InterruptedException {
// don't do buffering so that what's written to the listener
// gets reflected to the file immediately, which can then be
// served to the browser immediately
OutputStream logger;
try {
logger = Files.newOutputStream(getLogFile().toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
} catch (InvalidPathException e) {
throw new IOException(e);
}
RunT build = job.getBuild();
// Global log filters
for (ConsoleLogFilter filter : ConsoleLogFilter.all()) {
logger = filter.decorateLogger(build, logger);
}
// Project specific log filters
if (project instanceof BuildableItemWithBuildWrappers && build instanceof AbstractBuild) {
BuildableItemWithBuildWrappers biwbw = (BuildableItemWithBuildWrappers) project;
for (BuildWrapper bw : biwbw.getBuildWrappersList()) {
logger = bw.decorateLogger((AbstractBuild) build, logger);
}
}
listener = new StreamBuildListener(logger,charset);
return listener;
}
    /**
     * Makes sure that {@code lastSuccessful} and {@code lastStable} legacy links in the project’s root directory exist.
     * Normally you do not need to call this explicitly, since {@link #execute} does so,
     * but this may be needed if you are creating synthetic {@link Run}s as part of a container project (such as Maven builds in a module set).
     * You should also ensure that {@link RunListener#fireStarted} and {@link RunListener#fireCompleted} are called.
     * @param listener probably unused
     * @throws InterruptedException probably not thrown
     * @since 1.530
     */
    public final void updateSymlinks(@Nonnull TaskListener listener) throws InterruptedException {
        createSymlink(listener, "lastSuccessful", PermalinkProjectAction.Permalink.LAST_SUCCESSFUL_BUILD);
        createSymlink(listener, "lastStable", PermalinkProjectAction.Permalink.LAST_STABLE_BUILD);
    }
/**
* Backward compatibility.
*
* We used to have $JENKINS_HOME/jobs/JOBNAME/lastStable and lastSuccessful symlinked to the appropriate
* builds, but now those are done in {@link PeepholePermalink}. So here, we simply create symlinks that
* resolves to the symlink created by {@link PeepholePermalink}.
*/
private void createSymlink(@Nonnull TaskListener listener, @Nonnull String name, @Nonnull PermalinkProjectAction.Permalink target) throws InterruptedException {
File buildDir = getParent().getBuildDir();
File rootDir = getParent().getRootDir();
String targetDir;
if (buildDir.equals(new File(rootDir, "builds"))) {
targetDir = "builds" + File.separator + target.getId();
} else {
targetDir = buildDir + File.separator + target.getId();
}
Util.createSymlink(rootDir, targetDir, name, listener);
}
/**
* Handles a fatal build problem (exception) that occurred during the build.
*/
private void handleFatalBuildProblem(@Nonnull BuildListener listener, @Nonnull Throwable e) {
if(listener!=null) {
LOGGER.log(FINE, getDisplayName()+" failed to build",e);
if(e instanceof IOException)
Util.displayIOException((IOException)e,listener);
Functions.printStackTrace(e, listener.fatalError(e.getMessage()));
} else {
LOGGER.log(SEVERE, getDisplayName()+" failed to build and we don't even have a listener",e);
}
}
    /**
     * Called when a job started building.
     */
    protected void onStartBuilding() {
        LOGGER.log(FINER, "moving to BUILDING on {0}", this);
        state = State.BUILDING;
        startTime = System.currentTimeMillis();
        if (runner!=null)
            RunnerStack.INSTANCE.push(runner);
        RunListener.fireInitialize(this);
    }
    /**
     * Called when a job finished building normally or abnormally.
     */
    protected void onEndBuilding() {
        // signal that we've finished building.
        state = State.COMPLETED;
        LOGGER.log(FINER, "moving to COMPLETED on {0}", this);
        if (runner!=null) {
            // MavenBuilds may be created without their corresponding runners.
            runner.checkpoints.allDone();
            runner = null;
            RunnerStack.INSTANCE.pop();
        }
        if (result == null) {
            result = Result.FAILURE;
            LOGGER.log(WARNING, "{0}: No build result is set, so marking as failure. This should not happen.", this);
        }
        RunListener.fireFinalized(this);
    }
    /**
     * Save the settings to a file.
     */
    public synchronized void save() throws IOException {
        // suppress intermediate saves while a bulk change is in progress
        if(BulkChange.contains(this)) return;
        getDataFile().write(this);
        SaveableListener.fireOnChange(this, getDataFile());
    }
    /** The {@code build.xml} file into which this run is persisted via {@link #XSTREAM}. */
    private @Nonnull XmlFile getDataFile() {
        return new XmlFile(XSTREAM,new File(getRootDir(),"build.xml"));
    }
    /**
     * Gets the log of the build as a string.
     * @return Returns the log or an empty string if it has not been found
     * @deprecated since 2007-11-11.
     *     Use {@link #getLog(int)} instead as it avoids loading
     *     the whole log into memory unnecessarily.
     */
    @Deprecated
    public @Nonnull String getLog() throws IOException {
        return Util.loadFile(getLogFile(),getCharset());
    }
    /**
     * Gets the log of the build as a list of strings (one per log line).
     * The number of lines returned is constrained by the maxLines parameter.
     *
     * @param maxLines The maximum number of log lines to return. If the log
     * is bigger than this, only the most recent lines are returned.
     * @return A list of log lines. Will have no more than maxLines elements.
     * @throws IOException If there is a problem reading the log file.
     */
    public @Nonnull List<String> getLog(int maxLines) throws IOException {
        if (maxLines == 0) {
            return Collections.emptyList();
        }
        int lines = 0;
        long filePointer;
        final List<String> lastLines = new ArrayList<>(Math.min(maxLines, 128));
        final List<Byte> bytes = new ArrayList<>();
        try (RandomAccessFile fileHandler = new RandomAccessFile(getLogFile(), "r")) {
            long fileLength = fileHandler.length() - 1;
            // Scan the file backwards one byte at a time, accumulating (reversed) line
            // content, so only the last maxLines lines are ever held in memory.
            for (filePointer = fileLength; filePointer != -1 && maxLines != lines; filePointer--) {
                fileHandler.seek(filePointer);
                byte readByte = fileHandler.readByte();
                if (readByte == 0x0A) { // LF terminates a line...
                    // ...except the file's trailing newline, which starts no new line
                    if (filePointer < fileLength) {
                        lines = lines + 1;
                        lastLines.add(convertBytesToString(bytes));
                        bytes.clear();
                    }
                } else if (readByte != 0xD) { // drop CR so CRLF logs yield clean lines
                    bytes.add(readByte);
                }
            }
        }
        // The first line of the file is not preceded by an LF; flush the remainder.
        if (lines != maxLines) {
            lastLines.add(convertBytesToString(bytes));
        }
        Collections.reverse(lastLines);
        // If the log has been truncated, include that information.
        // Use set (replaces the first element) rather than add so that
        // the list doesn't grow beyond the specified maximum number of lines.
        if (lines == maxLines) {
            lastLines.set(0, "[...truncated " + Functions.humanReadableByteSize(filePointer)+ "...]");
        }
        return ConsoleNote.removeNotes(lastLines);
    }
private String convertBytesToString(List<Byte> bytes) {
Collections.reverse(bytes);
Byte[] byteArray = bytes.toArray(new Byte[bytes.size()]);
return new String(ArrayUtils.toPrimitive(byteArray), getCharset());
}
    /** Redirects to the 48x48 status icon image for this build. */
    public void doBuildStatus( StaplerRequest req, StaplerResponse rsp ) throws IOException {
        rsp.sendRedirect2(req.getContextPath()+"/images/48x48/"+getBuildStatusUrl());
    }
    /** Image file name of the status ball for this build. */
    public @Nonnull String getBuildStatusUrl() {
        return getIconColor().getImage();
    }
    /** Icon CSS class name for this build's status. */
    public String getBuildStatusIconClassName() {
        return getIconColor().getIconClassName();
    }
    /** Single-line summary of a build's status, relative to its previous build. */
    public static class Summary {
        /**
         * Is this build worse or better, compared to the previous build?
         */
        public boolean isWorse;
        public String message;
        public Summary(boolean worse, String message) {
            this.isWorse = worse;
            this.message = message;
        }
    }
    /**
     * Used to implement {@link #getBuildStatusSummary}.
     * @since 1.575
     */
    public static abstract class StatusSummarizer implements ExtensionPoint {
        /**
         * Possibly summarizes the reasons for a build’s status.
         * @param run a completed build
         * @param trend the result of {@link ResultTrend#getResultTrend(hudson.model.Run)} on {@code run} (precomputed for efficiency)
         * @return a summary, or null to fall back to other summarizers or built-in behavior
         */
        public abstract @CheckForNull Summary summarize(@Nonnull Run<?,?> run, @Nonnull ResultTrend trend);
    }
    /**
     * Gets an object which represents the single line summary of the status of this build
     * (especially in comparison with the previous build.)
     * @see StatusSummarizer
     */
    public @Nonnull Summary getBuildStatusSummary() {
        if (isBuilding()) {
            return new Summary(false, Messages.Run_Summary_Unknown());
        }
        ResultTrend trend = ResultTrend.getResultTrend(this);
        // Extensions get the first chance to phrase the summary.
        for (StatusSummarizer summarizer : ExtensionList.lookup(StatusSummarizer.class)) {
            Summary summary = summarizer.summarize(this, trend);
            if (summary != null) {
                return summary;
            }
        }
        // Built-in fallback phrasing per result trend.
        switch (trend) {
            case ABORTED : return new Summary(false, Messages.Run_Summary_Aborted());
            case NOT_BUILT : return new Summary(false, Messages.Run_Summary_NotBuilt());
            case FAILURE : return new Summary(true, Messages.Run_Summary_BrokenSinceThisBuild());
            case STILL_FAILING :
                RunT since = getPreviousNotFailedBuild();
                if(since==null)
                    return new Summary(false, Messages.Run_Summary_BrokenForALongTime());
                RunT failedBuild = since.getNextBuild();
                return new Summary(false, Messages.Run_Summary_BrokenSince(failedBuild.getDisplayName()));
            case NOW_UNSTABLE:
            case STILL_UNSTABLE :
                return new Summary(false, Messages.Run_Summary_Unstable());
            case UNSTABLE :
                return new Summary(true, Messages.Run_Summary_Unstable());
            case SUCCESS :
                return new Summary(false, Messages.Run_Summary_Stable());
            case FIXED :
                return new Summary(false, Messages.Run_Summary_BackToNormal());
        }
        return new Summary(false, Messages.Run_Summary_Unknown());
    }
    /**
     * Serves the artifacts.
     * @throws AccessDeniedException Access denied
     */
    public @Nonnull DirectoryBrowserSupport doArtifact() {
        if(Functions.isArtifactsPermissionEnabled()) {
            checkPermission(ARTIFACTS);
        }
        return new DirectoryBrowserSupport(this, getArtifactManager().root(), Messages.Run_ArtifactsBrowserTitle(project.getDisplayName(), getDisplayName()), "package.png", true);
    }
    /**
     * Returns the build number in the body.
     */
    public void doBuildNumber(StaplerResponse rsp) throws IOException {
        rsp.setContentType("text/plain");
        rsp.setCharacterEncoding("US-ASCII");
        rsp.setStatus(HttpServletResponse.SC_OK);
        rsp.getWriter().print(number);
    }
    /**
     * Returns the build time stamp in the body.
     */
    public void doBuildTimestamp( StaplerRequest req, StaplerResponse rsp, @QueryParameter String format) throws IOException {
        rsp.setContentType("text/plain");
        rsp.setCharacterEncoding("US-ASCII");
        rsp.setStatus(HttpServletResponse.SC_OK);
        // without an explicit format, use short English date+time; otherwise
        // honor the requested pattern in the request's locale
        DateFormat df = format==null ?
                DateFormat.getDateTimeInstance(DateFormat.SHORT,DateFormat.SHORT, Locale.ENGLISH) :
                new SimpleDateFormat(format,req.getLocale());
        rsp.getWriter().print(df.format(getTime()));
    }
/**
* Sends out the raw console output.
*/
public void doConsoleText(StaplerRequest req, StaplerResponse rsp) throws IOException {
rsp.setContentType("text/plain;charset=UTF-8");
;
try (InputStream input = getLogInputStream();
OutputStream os = rsp.getCompressedOutputStream(req);
PlainTextConsoleOutputStream out = new PlainTextConsoleOutputStream(os)) {
IOUtils.copy(input, out);
}
}
    /**
     * Handles incremental log output.
     * @deprecated as of 1.352
     *      Use {@code getLogText().doProgressiveText(req,rsp)}
     */
    @Deprecated
    public void doProgressiveLog( StaplerRequest req, StaplerResponse rsp) throws IOException {
        getLogText().doProgressText(req,rsp);
    }
    /**
     * Checks whether keep status can be toggled.
     * Normally it can, but if there is a complex reason (from subclasses) why this build must be kept, the toggle is meaningless.
     * @return true if {@link #doToggleLogKeep} and {@link #keepLog(boolean)} and {@link #keepLog()} are options
     * @since 1.510
     */
    public boolean canToggleLogKeep() {
        if (!keepLog && isKeepLog()) {
            // Definitely prevented.
            return false;
        }
        // TODO may be that keepLog is on (perhaps toggler earlier) yet isKeepLog() would be true anyway.
        // In such a case this will incorrectly return true and logKeep.jelly will allow the toggle.
        // However at least then (after redirecting to the same page) the toggle button will correctly disappear.
        return true;
    }
    /** Flips the keep-forever flag from the UI, then returns to the previous page. */
    @RequirePOST
    public void doToggleLogKeep( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        keepLog(!keepLog);
        rsp.forwardToPreviousPage(req);
    }
    /**
     * Marks this build to keep the log.
     */
    @CLIMethod(name="keep-build")
    public final void keepLog() throws IOException {
        keepLog(true);
    }
    /** Sets the keep-log flag; requires UPDATE to enable and DELETE to disable. */
    public void keepLog(boolean newValue) throws IOException {
        checkPermission(newValue ? UPDATE : DELETE);
        keepLog = newValue;
        save();
    }
    /**
     * Deletes the build when the button is pressed.
     */
    @RequirePOST
    public void doDoDelete( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        checkPermission(DELETE);
        // We should not simply delete the build if it has been explicitly
        // marked to be preserved, or if the build should not be deleted
        // due to dependencies!
        String why = getWhyKeepLog();
        if (why!=null) {
            sendError(Messages.Run_UnableToDelete(getFullDisplayName(), why), req, rsp);
            return;
        }
        try{
            delete();
        }
        catch(IOException ex){
            // offer a retry page instead of a bare error
            req.setAttribute("stackTraces", Functions.printThrowable(ex));
            req.getView(this, "delete-retry.jelly").forward(req, rsp);
            return;
        }
        rsp.sendRedirect2(req.getContextPath()+'/' + getParent().getUrl());
    }
    /** Sets the build description (requires UPDATE permission) and persists it. */
    public void setDescription(String description) throws IOException {
        checkPermission(UPDATE);
        this.description = description;
        save();
    }
    /**
     * Accepts the new description.
     */
    @RequirePOST
    public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
        setDescription(req.getParameter("description"));
        rsp.sendRedirect("."); // go to the top page
    }
/**
* @deprecated as of 1.292
* Use {@link #getEnvironment(TaskListener)} instead.
*/
@Deprecated
public Map<String,String> getEnvVars() {
LOGGER.log(WARNING, "deprecated call to Run.getEnvVars\n\tat {0}", new Throwable().getStackTrace()[1]);
try {
return getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
} catch (IOException e) {
return new EnvVars();
} catch (InterruptedException e) {
return new EnvVars();
}
}
    /**
     * @deprecated as of 1.305 use {@link #getEnvironment(TaskListener)}
     */
    @Deprecated
    public EnvVars getEnvironment() throws IOException, InterruptedException {
        LOGGER.log(WARNING, "deprecated call to Run.getEnvironment\n\tat {0}", new Throwable().getStackTrace()[1]);
        return getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
    }
    /**
     * Returns the map that contains environmental variables to be used for launching
     * processes for this build.
     *
     * <p>
     * {@link hudson.tasks.BuildStep}s that invoke external processes should use this.
     * This allows {@link BuildWrapper}s and other project configurations (such as JDK selection)
     * to take effect.
     *
     * <p>
     * Unlike earlier {@link #getEnvVars()}, this map contains the whole environment,
     * not just the overrides, so one can introspect values to change its behavior.
     *
     * @return the map with the environmental variables.
     * @since 1.305
     */
    public @Nonnull EnvVars getEnvironment(@Nonnull TaskListener listener) throws IOException, InterruptedException {
        Computer c = Computer.currentComputer();
        // a null node means we are not running on a computer (e.g. master-side call)
        Node n = c==null ? null : c.getNode();
        EnvVars env = getParent().getEnvironment(n,listener);
        env.putAll(getCharacteristicEnvVars());
        // apply them in a reverse order so that higher ordinal ones can modify values added by lower ordinal ones
        for (EnvironmentContributor ec : EnvironmentContributor.all().reverseView())
            ec.buildEnvironmentFor(this,env,listener);
        return env;
    }
/**
* Builds up the environment variable map that's sufficient to identify a process
* as ours. This is used to kill run-away processes via {@link ProcessTree#killAll(Map)}.
*/
public @Nonnull final EnvVars getCharacteristicEnvVars() {
EnvVars env = getParent().getCharacteristicEnvVars();
env.put("BUILD_NUMBER",String.valueOf(number));
env.put("BUILD_ID",getId());
env.put("BUILD_TAG","jenkins-"+getParent().getFullName().replace('/', '-')+"-"+number);
return env;
}
/**
* Produces an identifier for this run unique in the system.
* @return the {@link Job#getFullName}, then {@code #}, then {@link #getNumber}
* @see #fromExternalizableId
*/
public @Nonnull String getExternalizableId() {
return project.getFullName() + "#" + getNumber();
}
    /**
     * Tries to find a run from an persisted identifier.
     * @param id as produced by {@link #getExternalizableId}
     * @return the same run, or null if the job or run was not found
     * @throws IllegalArgumentException if the ID is malformed
     */
    public @CheckForNull static Run<?,?> fromExternalizableId(String id) throws IllegalArgumentException {
        // the id has the form "full/job/name#buildNumber"
        int hash = id.lastIndexOf('#');
        if (hash <= 0) {
            throw new IllegalArgumentException("Invalid id");
        }
        String jobName = id.substring(0, hash);
        int number;
        try {
            number = Integer.parseInt(id.substring(hash + 1));
        } catch (NumberFormatException x) {
            throw new IllegalArgumentException(x);
        }
        // NOTE(review): Jenkins.getInstance() may return null while Jenkins is not
        // fully up, which would surface here as a NullPointerException rather than
        // the documented null return — consider guarding. TODO confirm intent.
        Jenkins j = Jenkins.getInstance();
        Job<?,?> job = j.getItemByFullName(jobName, Job.class);
        if (job == null) {
            return null;
        }
        return job.getBuildByNumber(number);
    }
    /**
     * Returns the estimated duration for this run if it is currently running.
     * Default to {@link Job#getEstimatedDuration()}, may be overridden in subclasses
     * if duration may depend on run specific parameters (like incremental Maven builds).
     *
     * @return the estimated duration in milliseconds
     * @since 1.383
     */
    @Exported
    public long getEstimatedDuration() {
        return project.getEstimatedDuration();
    }
    /** Accepts the per-build configuration form (display name and description). */
    @RequirePOST
    public @Nonnull HttpResponse doConfigSubmit( StaplerRequest req ) throws IOException, ServletException, FormException {
        checkPermission(UPDATE);
        // batch the two setters into a single save via BulkChange
        try (BulkChange bc = new BulkChange(this)) {
            JSONObject json = req.getSubmittedForm();
            submit(json);
            bc.commit();
        }
        return FormApply.success(".");
    }
    /** Applies the submitted form values; subclasses may extend. */
    protected void submit(JSONObject json) throws IOException {
        setDisplayName(Util.fixEmptyAndTrim(json.getString("displayName")));
        setDescription(json.getString("description"));
    }
    // XStream instance used to persist runs to build.xml; see getDataFile().
    public static final XStream XSTREAM = new XStream2();
    /**
     * Alias to {@link #XSTREAM} so that one can access additional methods on {@link XStream2} more easily.
     */
    public static final XStream2 XSTREAM2 = (XStream2)XSTREAM;
    static {
        XSTREAM.alias("build",FreeStyleBuild.class);
        XSTREAM.registerConverter(Result.conv);
    }
    private static final Logger LOGGER = Logger.getLogger(Run.class.getName());
/**
* Sort by date. Newer ones first.
*/
public static final Comparator<Run> ORDER_BY_DATE = new Comparator<Run>() {
public int compare(@Nonnull Run lhs, @Nonnull Run rhs) {
long lt = lhs.getTimeInMillis();
long rt = rhs.getTimeInMillis();
if(lt>rt) return -1;
if(lt<rt) return 1;
return 0;
}
};
    /**
     * {@link FeedAdapter} to produce feed from the summary of this build.
     */
    public static final FeedAdapter<Run> FEED_ADAPTER = new DefaultFeedAdapter();
    /**
     * {@link FeedAdapter} to produce feeds to show one build per project.
     */
    public static final FeedAdapter<Run> FEED_ADAPTER_LATEST = new DefaultFeedAdapter() {
        /**
         * The entry unique ID needs to be tied to a project, so that
         * new builds will replace the old result.
         */
        @Override
        public String getEntryID(Run e) {
            // can't use a meaningful year field unless we remember when the job was created.
            return "tag:hudson.dev.java.net,2008:"+e.getParent().getAbsoluteUrl();
        }
    };
    /**
     * {@link BuildBadgeAction} that shows the logs are being kept.
     */
    public final class KeepLogBuildBadge implements BuildBadgeAction {
        public @CheckForNull String getIconFileName() { return null; }
        public @CheckForNull String getDisplayName() { return null; }
        public @CheckForNull String getUrlName() { return null; }
        public @CheckForNull String getWhyKeepLog() { return Run.this.getWhyKeepLog(); }
    }
    public static final PermissionGroup PERMISSIONS = new PermissionGroup(Run.class,Messages._Run_Permissions_Title());
    public static final Permission DELETE = new Permission(PERMISSIONS,"Delete",Messages._Run_DeletePermission_Description(),Permission.DELETE, PermissionScope.RUN);
    public static final Permission UPDATE = new Permission(PERMISSIONS,"Update",Messages._Run_UpdatePermission_Description(),Permission.UPDATE, PermissionScope.RUN);
    /** See {@link hudson.Functions#isArtifactsPermissionEnabled} */
    public static final Permission ARTIFACTS = new Permission(PERMISSIONS,"Artifacts",Messages._Run_ArtifactsPermission_Description(), null,
                                                              Functions.isArtifactsPermissionEnabled(), new PermissionScope[]{PermissionScope.RUN});
    /** Default feed adapter: one feed entry per build, keyed by job name, year, and build id. */
    private static class DefaultFeedAdapter implements FeedAdapter<Run> {
        public String getEntryTitle(Run entry) {
            return entry.getFullDisplayName()+" ("+entry.getBuildStatusSummary().message+")";
        }
        public String getEntryUrl(Run entry) {
            return entry.getUrl();
        }
        public String getEntryID(Run entry) {
            return "tag:" + "hudson.dev.java.net,"
                + entry.getTimestamp().get(Calendar.YEAR) + ":"
                + entry.getParent().getFullName()+':'+entry.getId();
        }
        public String getEntryDescription(Run entry) {
            return entry.getDescription();
        }
        public Calendar getEntryTimestamp(Run entry) {
            return entry.getTimestamp();
        }
        public String getEntryAuthor(Run entry) {
            return JenkinsLocationConfiguration.get().getAdminAddress();
        }
    }
@Override
public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
Object returnedResult = super.getDynamic(token, req, rsp);
if (returnedResult == null){
//check transient actions too
for(Action action: getTransientActions()){
String urlName = action.getUrlName();
if (urlName == null) {
continue;
}
if (urlName.equals(token)) {
return action;
}
}
// Next/Previous Build links on an action page (like /job/Abc/123/testReport)
// will also point to same action (/job/Abc/124/testReport), but other builds
// may not have the action.. tell browsers to redirect up to the build page.
returnedResult = new RedirectUp();
}
return returnedResult;
}
    /**
     * URL-bound fallback that answers 404 while nudging browsers to navigate up
     * to the build page when a per-build action URL does not exist on this build.
     */
    public static class RedirectUp {
        public void doDynamic(StaplerResponse rsp) throws IOException {
            // Compromise to handle both browsers (auto-redirect) and programmatic access
            // (want accurate 404 response).. send 404 with javascript to redirect browsers.
            rsp.setStatus(HttpServletResponse.SC_NOT_FOUND);
            rsp.setContentType("text/html;charset=UTF-8");
            PrintWriter out = rsp.getWriter();
            out.println("<html><head>" +
                "<meta http-equiv='refresh' content='1;url=..'/>" +
                "<script>window.location.replace('..');</script>" +
                "</head>" +
                "<body style='background-color:white; color:white;'>" +
                "Not found</body></html>");
            out.flush();
        }
    }
}
|
core/src/main/java/hudson/model/Run.java
|
/*
* The MIT License
*
* Copyright (c) 2004-2012, Sun Microsystems, Inc., Kohsuke Kawaguchi,
* Daniel Dyer, Red Hat, Inc., Tom Huybrechts, Romain Seguy, Yahoo! Inc.,
* Darek Ostolski, CloudBees, Inc.
*
* Copyright (c) 2012, Martin Schroeder, Intel Mobile Communications GmbH
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import com.jcraft.jzlib.GZIPInputStream;
import com.thoughtworks.xstream.XStream;
import hudson.AbortException;
import hudson.BulkChange;
import hudson.EnvVars;
import hudson.ExtensionList;
import hudson.ExtensionPoint;
import hudson.FeedAdapter;
import hudson.Functions;
import hudson.console.AnnotatedLargeText;
import hudson.console.ConsoleLogFilter;
import hudson.console.ConsoleNote;
import hudson.console.ModelHyperlinkNote;
import hudson.console.PlainTextConsoleOutputStream;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.StandardOpenOption;
import jenkins.util.SystemProperties;
import hudson.Util;
import hudson.XmlFile;
import hudson.cli.declarative.CLIMethod;
import hudson.model.Descriptor.FormException;
import hudson.model.listeners.RunListener;
import hudson.model.listeners.SaveableListener;
import hudson.model.queue.Executables;
import hudson.model.queue.SubTask;
import hudson.search.SearchIndexBuilder;
import hudson.security.ACL;
import hudson.security.AccessControlled;
import hudson.security.Permission;
import hudson.security.PermissionGroup;
import hudson.security.PermissionScope;
import hudson.tasks.BuildWrapper;
import hudson.util.FormApply;
import hudson.util.LogTaskListener;
import hudson.util.ProcessTree;
import hudson.util.XStream2;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import java.io.Reader;
import java.nio.charset.Charset;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import static java.util.logging.Level.*;
import java.util.logging.Logger;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import jenkins.model.ArtifactManager;
import jenkins.model.ArtifactManagerConfiguration;
import jenkins.model.ArtifactManagerFactory;
import jenkins.model.BuildDiscarder;
import jenkins.model.Jenkins;
import jenkins.model.JenkinsLocationConfiguration;
import jenkins.model.PeepholePermalink;
import jenkins.model.RunAction2;
import jenkins.model.StandardArtifactManager;
import jenkins.model.lazy.BuildReference;
import jenkins.model.lazy.LazyBuildMixIn;
import jenkins.util.VirtualFile;
import jenkins.util.io.OnMaster;
import net.sf.json.JSONObject;
import org.acegisecurity.AccessDeniedException;
import org.acegisecurity.Authentication;
import org.apache.commons.io.IOUtils;
import org.apache.commons.jelly.XMLOutput;
import org.apache.commons.lang.ArrayUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.kohsuke.stapler.export.Exported;
import org.kohsuke.stapler.export.ExportedBean;
import org.kohsuke.stapler.interceptor.RequirePOST;
/**
* A particular execution of {@link Job}.
*
* <p>
* Custom {@link Run} type is always used in conjunction with
* a custom {@link Job} type, so there's no separate registration
* mechanism for custom {@link Run} types.
*
* @author Kohsuke Kawaguchi
* @see RunListener
*/
@ExportedBean
public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,RunT>>
extends Actionable implements ExtensionPoint, Comparable<RunT>, AccessControlled, PersistenceRoot, DescriptorByNameOwner, OnMaster {
/**
* The original {@link Queue.Item#getId()} has not yet been mapped onto the {@link Run} instance.
* @since 1.601
*/
public static final long QUEUE_ID_UNKNOWN = -1;
protected transient final @Nonnull JobT project;
/**
* Build number.
*
* <p>
* In earlier versions < 1.24, this number is not unique nor continuous,
* but going forward, it will, and this really replaces the build id.
*/
public transient /*final*/ int number;
/**
* The original Queue task ID from where this Run instance originated.
*/
private long queueId = Run.QUEUE_ID_UNKNOWN;
/**
* Previous build. Can be null.
* TODO JENKINS-22052 this is not actually implemented any more
*
* External code should use {@link #getPreviousBuild()}
*/
@Restricted(NoExternalUse.class)
protected volatile transient RunT previousBuild;
/**
* Next build. Can be null.
*
* External code should use {@link #getNextBuild()}
*/
@Restricted(NoExternalUse.class)
protected volatile transient RunT nextBuild;
/**
* Pointer to the next younger build in progress. This data structure is lazily updated,
* so it may point to the build that's already completed. This pointer is set to 'this'
* if the computation determines that everything earlier than this build is already completed.
*/
/* does not compile on JDK 7: private*/ volatile transient RunT previousBuildInProgress;
/** ID as used for historical build records; otherwise null. */
private @CheckForNull String id;
/**
* When the build is scheduled.
*/
protected /*final*/ long timestamp;
/**
* When the build has started running.
*
* For historical reasons, 0 means no value is recorded.
*
* @see #getStartTimeInMillis()
*/
private long startTime;
/**
* The build result.
* This value may change while the state is in {@link Run.State#BUILDING}.
*/
protected volatile Result result;
/**
* Human-readable description. Can be null.
*/
protected volatile String description;
/**
* Human-readable name of this build. Can be null.
* If non-null, this text is displayed instead of "#NNN", which is the default.
* @since 1.390
*/
private volatile String displayName;
/**
* The current build state.
*/
private volatile transient State state;
    /**
     * Lifecycle state of a build.
     * NOTE: the declaration order matters — {@link #isBuilding} and {@link #isLogUpdated}
     * compare states with {@code compareTo}, so any new state must preserve this ordering.
     */
    private static enum State {
        /**
         * Build is created/queued but we haven't started building it.
         */
        NOT_STARTED,
        /**
         * Build is in progress.
         */
        BUILDING,
        /**
         * Build is completed now, and the status is determined,
         * but log files are still being updated.
         *
         * The significance of this state is that Jenkins
         * will now see this build as completed. Things like
         * "triggering other builds" requires this as pre-condition.
         * See JENKINS-980.
         */
        POST_PRODUCTION,
        /**
         * Build is completed now, and log file is closed.
         */
        COMPLETED
    }
/**
* Number of milli-seconds it took to run this build.
*/
protected long duration;
/**
* Charset in which the log file is written.
* For compatibility reason, this field may be null.
* For persistence, this field is string and not {@link Charset}.
*
* @see #getCharset()
* @since 1.257
*/
protected String charset;
/**
* Keeps this log entries.
*/
private boolean keepLog;
/**
* If the build is in progress, remember {@link RunExecution} that's running it.
* This field is not persisted.
*/
private volatile transient RunExecution runner;
/**
* Artifact manager associated with this build, if any.
* @since 1.532
*/
private @CheckForNull ArtifactManager artifactManager;
/**
* Creates a new {@link Run}.
* @param job Owner job
*/
    protected Run(@Nonnull JobT job) throws IOException {
        this(job, System.currentTimeMillis());
        // A brand-new build claims the next build number from its job.
        this.number = project.assignBuildNumber();
        LOGGER.log(FINER, "new {0} @{1}", new Object[] {this, hashCode()});
    }
    /**
     * Constructor for creating a {@link Run} object in
     * an arbitrary state.
     * {@link #number} must be set manually.
     * <p>May be used in a {@link SubTask#createExecutable} (instead of calling {@link LazyBuildMixIn#newBuild}).
     * For example, {@code MatrixConfiguration.newBuild} does this
     * so that the {@link #timestamp} as well as {@link #number} are shared with the parent build.
     */
    protected Run(@Nonnull JobT job, @Nonnull Calendar timestamp) {
        this(job,timestamp.getTimeInMillis());
    }
    /** @see #Run(Job, Calendar) */
    protected Run(@Nonnull JobT job, long timestamp) {
        this.project = job;
        this.timestamp = timestamp;
        this.state = State.NOT_STARTED;
    }
    /**
     * Loads a run from a log file.
     * The build number is derived from the build directory name, which must therefore
     * be the decimal build number.
     */
    protected Run(@Nonnull JobT project, @Nonnull File buildDir) throws IOException {
        this.project = project;
        this.previousBuildInProgress = _this(); // loaded builds are always completed
        number = Integer.parseInt(buildDir.getName());
        reload();
    }
/**
* Reloads the build record from disk.
*
* @since 1.410
*/
    public void reload() throws IOException {
        this.state = State.COMPLETED;
        // TODO ABORTED would perhaps make more sense than FAILURE:
        this.result = Result.FAILURE; // defensive measure. value should be overwritten by unmarshal, but just in case the saved data is inconsistent
        getDataFile().unmarshal(this); // load the rest of the data
        if (state == State.COMPLETED) {
            LOGGER.log(FINER, "reload {0} @{1}", new Object[] {this, hashCode()});
        } else {
            // The persisted XML claimed a non-completed state; surface it so corrupt records are noticed.
            LOGGER.log(WARNING, "reload {0} @{1} with anomalous state {2}", new Object[] {this, hashCode(), state});
        }
        // not calling onLoad upon reload. partly because we don't want to call that from Run constructor,
        // and partly because some existing use of onLoad isn't assuming that it can be invoked multiple times.
    }
/**
* Called after the build is loaded and the object is added to the build list.
*/
    @SuppressWarnings("deprecation")
    protected void onLoad() {
        // Give every action a chance to resolve its back-reference to this build.
        for (Action a : getAllActions()) {
            if (a instanceof RunAction2) {
                try {
                    ((RunAction2) a).onLoad(this);
                } catch (RuntimeException x) {
                    // A broken action must not prevent the rest of the build record from loading.
                    LOGGER.log(WARNING, "failed to load " + a + " from " + getDataFile(), x);
                    removeAction(a); // if possible; might be in an inconsistent state
                }
            } else if (a instanceof RunAction) {
                // Legacy callback, kept for compatibility with older plugins.
                ((RunAction) a).onLoad();
            }
        }
        if (artifactManager != null) {
            artifactManager.onLoad(this);
        }
    }
/**
* Return all transient actions associated with this build.
*
* @return the list can be empty but never null. read only.
* @deprecated Use {@link #getAllActions} instead.
*/
@Deprecated
public List<Action> getTransientActions() {
List<Action> actions = new ArrayList<Action>();
for (TransientBuildActionFactory factory: TransientBuildActionFactory.all()) {
for (Action created : factory.createFor(this)) {
if (created == null) {
LOGGER.log(WARNING, "null action added by {0}", factory);
continue;
}
actions.add(created);
}
}
return Collections.unmodifiableList(actions);
}
/**
* {@inheritDoc}
* A {@link RunAction2} is handled specially.
*/
    @SuppressWarnings("deprecation")
    @Override
    public void addAction(@Nonnull Action a) {
        // Attach first, then notify, so the action can already see itself on the build.
        super.addAction(a);
        if (a instanceof RunAction2) {
            ((RunAction2) a).onAttached(this);
        } else if (a instanceof RunAction) {
            // Legacy callback, kept for compatibility with older plugins.
            ((RunAction) a).onAttached(this);
        }
    }
    /**
     * Obtains 'this' in a more type safe signature.
     */
    @SuppressWarnings({"unchecked"})
    protected @Nonnull RunT _this() {
        return (RunT)this;
    }
/**
* Ordering based on build numbers.
* If numbers are equal order based on names of parent projects.
*/
public int compareTo(@Nonnull RunT that) {
final int res = this.number - that.number;
if (res == 0)
return this.getParent().getFullName().compareTo(that.getParent().getFullName());
return res;
}
/**
* Get the {@link Queue.Item#getId()} of the original queue item from where this Run instance
* originated.
* @return The queue item ID.
* @since 1.601
*/
    @Exported
    public long getQueueId() {
        // QUEUE_ID_UNKNOWN (-1) when no queue id was ever mapped onto this run.
        return queueId;
    }
    /**
     * Set the queue item ID.
     * <p>
     * Mapped from the {@link Queue.Item#getId()}.
     * @param queueId The queue item ID.
     */
    @Restricted(NoExternalUse.class)
    public void setQueueId(long queueId) {
        this.queueId = queueId;
    }
/**
* Returns the build result.
*
* <p>
* When a build is {@link #isBuilding() in progress}, this method
* returns an intermediate result.
* @return The status of the build, if it has completed or some build step has set a status; may be null if the build is ongoing.
*/
    @Exported
    public @CheckForNull Result getResult() {
        return result;
    }
    /**
     * Sets the {@link #getResult} of this build.
     * Has no effect when the result is already set and worse than the proposed result.
     * May only be called after the build has started and before it has moved into post-production
     * (normally meaning both {@link #isInProgress} and {@link #isBuilding} are true).
     * @param r the proposed new result
     * @throws IllegalStateException if the build has not yet started, is in post-production, or is complete
     */
    public void setResult(@Nonnull Result r) {
        if (state != State.BUILDING) {
            throw new IllegalStateException("cannot change build result while in " + state);
        }
        // result can only get worse
        if (result==null || r.isWorseThan(result)) {
            result = r;
            // Attach a stack trace at FINER so the origin of a result change can be traced.
            LOGGER.log(FINE, this + " in " + getRootDir() + ": result is set to " + r, LOGGER.isLoggable(Level.FINER) ? new Exception() : null);
        }
    }
/**
* Gets the subset of {@link #getActions()} that consists of {@link BuildBadgeAction}s.
*/
public @Nonnull List<BuildBadgeAction> getBadgeActions() {
List<BuildBadgeAction> r = getActions(BuildBadgeAction.class);
if(isKeepLog()) {
r = new ArrayList<>(r);
r.add(new KeepLogBuildBadge());
}
return r;
}
/**
* Returns true if the build is not completed yet.
* This includes "not started yet" state.
*/
@Exported
public boolean isBuilding() {
return state.compareTo(State.POST_PRODUCTION) < 0;
}
/**
* Determine whether the run is being build right now.
* @return true if after started and before completed.
* @since 1.538
*/
protected boolean isInProgress() {
return state.equals(State.BUILDING) || state.equals(State.POST_PRODUCTION);
}
/**
* Returns true if the log file is still being updated.
*/
public boolean isLogUpdated() {
return state.compareTo(State.COMPLETED) < 0;
}
/**
* Gets the {@link Executor} building this job, if it's being built.
* Otherwise null.
*
* This method looks for {@link Executor} who's {@linkplain Executor#getCurrentExecutable() assigned to this build},
* and because of that this might not be necessarily in sync with the return value of {@link #isBuilding()} —
* an executor holds on to {@link Run} some more time even after the build is finished (for example to
* perform {@linkplain Run.State#POST_PRODUCTION post-production processing}.)
* @see Executor#of
*/
    @Exported
    public @CheckForNull Executor getExecutor() {
        // Only Queue.Executable runs can be mapped back to an Executor.
        return this instanceof Queue.Executable ? Executor.of((Queue.Executable) this) : null;
    }
/**
* Gets the one off {@link Executor} building this job, if it's being built.
* Otherwise null.
* @since 1.433
*/
public @CheckForNull Executor getOneOffExecutor() {
for( Computer c : Jenkins.getInstance().getComputers() ) {
for (Executor e : c.getOneOffExecutors()) {
if(e.getCurrentExecutable()==this)
return e;
}
}
return null;
}
/**
* Gets the charset in which the log file is written.
* @return never null.
* @since 1.257
*/
public final @Nonnull Charset getCharset() {
if(charset==null) return Charset.defaultCharset();
return Charset.forName(charset);
}
/**
* Returns the {@link Cause}s that triggered a build.
*
* <p>
* If a build sits in the queue for a long time, multiple build requests made during this period
* are all rolled up into one build, hence this method may return a list.
*
* @return
* can be empty but never null. read-only.
* @since 1.321
*/
public @Nonnull List<Cause> getCauses() {
CauseAction a = getAction(CauseAction.class);
if (a==null) return Collections.emptyList();
return Collections.unmodifiableList(a.getCauses());
}
/**
* Returns a {@link Cause} of a particular type.
*
* @since 1.362
*/
public @CheckForNull <T extends Cause> T getCause(Class<T> type) {
for (Cause c : getCauses())
if (type.isInstance(c))
return type.cast(c);
return null;
}
/**
* Returns true if this log file should be kept and not deleted.
*
* This is used as a signal to the {@link BuildDiscarder}.
*/
    @Exported
    public final boolean isKeepLog() {
        return getWhyKeepLog()!=null;
    }
    /**
     * If {@link #isKeepLog()} returns true, returns a short, human-readable
     * sentence that explains why it's being kept.
     * This base implementation only knows about the explicit "keep this log" flag;
     * it is non-final so subclasses can add further reasons.
     */
    public @CheckForNull String getWhyKeepLog() {
        if(keepLog)
            return Messages.Run_MarkedExplicitly();
        return null; // not marked at all
    }
    /**
     * The project this build is for.
     */
    public @Nonnull JobT getParent() {
        return project;
    }
/**
* When the build is scheduled.
*
* @see #getStartTimeInMillis()
*/
    @Exported
    public @Nonnull Calendar getTimestamp() {
        // Returns a fresh Calendar each call, so callers may mutate it freely.
        GregorianCalendar c = new GregorianCalendar();
        c.setTimeInMillis(timestamp);
        return c;
    }
    /**
     * Same as {@link #getTimestamp()} but in a different type.
     */
    public final @Nonnull Date getTime() {
        return new Date(timestamp);
    }
    /**
     * Same as {@link #getTimestamp()} but in a different type, that is since the time of the epoc.
     */
    public final long getTimeInMillis() {
        return timestamp;
    }
/**
* When the build has started running in an executor.
*
* For example, if a build is scheduled 1pm, and stayed in the queue for 1 hour (say, no idle agents),
* then this method returns 2pm, which is the time the job moved from the queue to the building state.
*
* @see #getTimestamp()
*/
    public final long getStartTimeInMillis() {
        // startTime==0 means "never recorded" (see the field doc); approximate with scheduling time.
        if (startTime==0) return timestamp; // fallback: approximate by the queuing time
        return startTime;
    }
    @Exported
    public String getDescription() {
        // May be null when no description has been set.
        return description;
    }
/**
* Returns the length-limited description.
* @return The length-limited description.
*/
    public @Nonnull String getTruncatedDescription() {
        final int maxDescrLength = 100;
        if (description == null || description.length() < maxDescrLength) {
            // NOTE(review): this returns null when description is null, despite the @Nonnull
            // annotation on the method — confirm how callers cope before tightening this.
            return description;
        }
        final String ending = "...";
        final int sz = description.length(), maxTruncLength = maxDescrLength - ending.length();
        // Walk the (possibly HTML) description, counting only characters outside tags,
        // and remember the last index at which truncation would cut neither a tag nor a word.
        boolean inTag = false;
        int displayChars = 0;
        int lastTruncatablePoint = -1;
        for (int i=0; i<sz; i++) {
            char ch = description.charAt(i);
            if(ch == '<') {
                inTag = true;
            } else if (ch == '>') {
                inTag = false;
                if (displayChars <= maxTruncLength) {
                    lastTruncatablePoint = i + 1; // just past a closing '>'
                }
            }
            if (!inTag) {
                displayChars++;
                if (displayChars <= maxTruncLength && ch == ' ') {
                    lastTruncatablePoint = i; // word boundary
                }
            }
        }
        String truncDesc = description;
        // Could not find a preferred truncatable index, force a trunc at maxTruncLength
        if (lastTruncatablePoint == -1)
            lastTruncatablePoint = maxTruncLength;
        if (displayChars >= maxDescrLength) {
            truncDesc = truncDesc.substring(0, lastTruncatablePoint) + ending;
        }
        return truncDesc;
    }
/**
* Gets the string that says how long since this build has started.
*
* @return
* string like "3 minutes" "1 day" etc.
*/
public @Nonnull String getTimestampString() {
long duration = new GregorianCalendar().getTimeInMillis()-timestamp;
return Util.getPastTimeString(duration);
}
/**
* Returns the timestamp formatted in xs:dateTime.
*/
public @Nonnull String getTimestampString2() {
return Util.XS_DATETIME_FORMATTER.format(new Date(timestamp));
}
/**
* Gets the string that says how long the build took to run.
*/
    public @Nonnull String getDurationString() {
        if (hasntStartedYet()) {
            return Messages.Run_NotStartedYet();
        } else if (isBuilding()) {
            // NOTE(review): startTime may still be 0 here for historical records (see field doc),
            // which would report an absurdly long in-progress duration — confirm.
            return Messages.Run_InProgressDuration(
                Util.getTimeSpanString(System.currentTimeMillis()-startTime));
        }
        return Util.getTimeSpanString(duration);
    }
    /**
     * Gets the millisecond it took to build.
     */
    @Exported
    public long getDuration() {
        return duration;
    }
/**
* Gets the icon color for display.
*/
public @Nonnull BallColor getIconColor() {
if(!isBuilding()) {
// already built
return getResult().color;
}
// a new build is in progress
BallColor baseColor;
RunT pb = getPreviousBuild();
if(pb==null)
baseColor = BallColor.NOTBUILT;
else
baseColor = pb.getIconColor();
return baseColor.anime();
}
/**
* Returns true if the build is still queued and hasn't started yet.
*/
public boolean hasntStartedYet() {
return state ==State.NOT_STARTED;
}
@Override
public String toString() {
return project.getFullName() + " #" + number;
}
@Exported
public String getFullDisplayName() {
return project.getFullDisplayName()+' '+getDisplayName();
}
@Exported
public String getDisplayName() {
return displayName!=null ? displayName : "#"+number;
}
public boolean hasCustomDisplayName() {
return displayName!=null;
}
    /**
     * Sets a custom display name, replacing the default "#NNN".
     * Requires {@link #UPDATE} permission and persists the change immediately.
     * @param value
     *      Set to null to revert back to the default "#NNN".
     */
    public void setDisplayName(String value) throws IOException {
        checkPermission(UPDATE);
        this.displayName = value;
        save();
    }
    @Exported(visibility=2)
    public int getNumber() {
        return number;
    }
/**
* Called by {@link RunMap} to obtain a reference to this run.
* @return Reference to the build. Never null
* @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#createReference
* @since 1.556
*/
    protected @Nonnull BuildReference<RunT> createReference() {
        return new BuildReference<RunT>(getId(), _this());
    }
    /**
     * Called by {@link RunMap} to drop bi-directional links in preparation for
     * deleting a build.
     * Unlinks this build from the doubly-linked sibling chain by splicing its
     * neighbours directly to each other.
     * @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#dropLinks
     * @since 1.556
     */
    protected void dropLinks() {
        if(nextBuild!=null)
            nextBuild.previousBuild = previousBuild;
        if(previousBuild!=null)
            previousBuild.nextBuild = nextBuild;
    }
/**
* @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#getPreviousBuild
*/
public @CheckForNull RunT getPreviousBuild() {
return previousBuild;
}
/**
* Gets the most recent {@linkplain #isBuilding() completed} build excluding 'this' Run itself.
*/
public final @CheckForNull RunT getPreviousCompletedBuild() {
RunT r=getPreviousBuild();
while (r!=null && r.isBuilding())
r=r.getPreviousBuild();
return r;
}
/**
* Obtains the next younger build in progress. It uses a skip-pointer so that we can compute this without
* O(n) computation time. This method also fixes up the skip list as we go, in a way that's concurrency safe.
*
* <p>
* We basically follow the existing skip list, and wherever we find a non-optimal pointer, we remember them
* in 'fixUp' and update them later.
*/
    public final @CheckForNull RunT getPreviousBuildInProgress() {
        if(previousBuildInProgress==this) return null; // the most common case
        List<RunT> fixUp = new ArrayList<RunT>();
        RunT r = _this(); // 'r' is the source of the pointer (so that we can add it to fix up if we find that the target of the pointer is inefficient.)
        RunT answer;
        while (true) {
            RunT n = r.previousBuildInProgress;
            if (n==null) {// no field computed yet.
                n=r.getPreviousBuild();
                fixUp.add(r);
            }
            if (r==n || n==null) {
                // this indicates that we know there's no build in progress beyond this point
                answer = null;
                break;
            }
            if (n.isBuilding()) {
                // we now know 'n' is the target we wanted
                answer = n;
                break;
            }
            fixUp.add(r); // r contains the stale 'previousBuildInProgress' back pointer
            r = n;
        }
        // fix up so that the next look up will run faster
        // (a self-pointer encodes "nothing older is in progress"; see the early return above)
        for (RunT f : fixUp)
            f.previousBuildInProgress = answer==null ? f : answer;
        return answer;
    }
/**
* Returns the last build that was actually built - i.e., skipping any with Result.NOT_BUILT
*/
public @CheckForNull RunT getPreviousBuiltBuild() {
RunT r=getPreviousBuild();
// in certain situations (aborted m2 builds) r.getResult() can still be null, although it should theoretically never happen
while( r!=null && (r.getResult() == null || r.getResult()==Result.NOT_BUILT) )
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last build that didn't fail before this build.
*/
public @CheckForNull RunT getPreviousNotFailedBuild() {
RunT r=getPreviousBuild();
while( r!=null && r.getResult()==Result.FAILURE )
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last failed build before this build.
*/
public @CheckForNull RunT getPreviousFailedBuild() {
RunT r=getPreviousBuild();
while( r!=null && r.getResult()!=Result.FAILURE )
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last successful build before this build.
* @since 1.383
*/
public @CheckForNull RunT getPreviousSuccessfulBuild() {
RunT r=getPreviousBuild();
while( r!=null && r.getResult()!=Result.SUCCESS )
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last {@code numberOfBuilds} builds with a build result ≥ {@code threshold}.
*
* @param numberOfBuilds the desired number of builds
* @param threshold the build result threshold
* @return a list with the builds (youngest build first).
* May be smaller than 'numberOfBuilds' or even empty
* if not enough builds satisfying the threshold have been found. Never null.
* @since 1.383
*/
public @Nonnull List<RunT> getPreviousBuildsOverThreshold(int numberOfBuilds, @Nonnull Result threshold) {
List<RunT> builds = new ArrayList<RunT>(numberOfBuilds);
RunT r = getPreviousBuild();
while (r != null && builds.size() < numberOfBuilds) {
if (!r.isBuilding() &&
(r.getResult() != null && r.getResult().isBetterOrEqualTo(threshold))) {
builds.add(r);
}
r = r.getPreviousBuild();
}
return builds;
}
/**
* @see jenkins.model.lazy.LazyBuildMixIn.RunMixIn#getNextBuild
*/
    public @CheckForNull RunT getNextBuild() {
        // null for the newest build
        return nextBuild;
    }
/**
* Returns the URL of this {@link Run}, relative to the context root of Hudson.
*
* @return
* String like "job/foo/32/" with trailing slash but no leading slash.
*/
// I really messed this up. I'm hoping to fix this some time
// it shouldn't have trailing '/', and instead it should have leading '/'
    public @Nonnull String getUrl() {
        // RUN may be accessed using permalinks, as "/lastSuccessful" or other, so try to retrieve this base URL
        // looking for "this" in the current request ancestors
        // @see also {@link AbstractItem#getUrl}
        StaplerRequest req = Stapler.getCurrentRequest();
        if (req != null) {
            String seed = Functions.getNearestAncestorUrl(req,this);
            if(seed!=null) {
                // trim off the context path portion and leading '/', but add trailing '/'
                return seed.substring(req.getContextPath().length()+1)+'/';
            }
        }
        // outside a request (or not reachable via ancestors): fall back to the canonical URL
        return project.getUrl()+getNumber()+'/';
    }
/**
* Obtains the absolute URL to this build.
*
* @deprecated
* This method shall <b>NEVER</b> be used during HTML page rendering, as it's too easy for
* misconfiguration to break this value, with network set up like Apache reverse proxy.
* This method is only intended for the remote API clients who cannot resolve relative references.
*/
    @Exported(visibility=2,name="url")
    @Deprecated
    public final @Nonnull String getAbsoluteUrl() {
        return project.getAbsoluteUrl()+getNumber()+'/';
    }
    // URL fragment used by the search index; same shape as the tail of getUrl().
    public final @Nonnull String getSearchUrl() {
        return getNumber()+"/";
    }
    /**
     * Unique ID of this build.
     * Usually the decimal form of {@link #number}, but may be a formatted timestamp for historical builds.
     */
    @Exported
    public @Nonnull String getId() {
        return id != null ? id : Integer.toString(number);
    }
    @Override
    public @CheckForNull Descriptor getDescriptorByName(String className) {
        return Jenkins.getInstance().getDescriptorByName(className);
    }
    /**
     * Get the root directory of this {@link Run} on the master.
     * Files related to this {@link Run} should be stored below this directory.
     * The directory name is the decimal build number (the loading constructor parses it back).
     * @return Root directory of this {@link Run} on the master. Never null
     */
    @Override
    public @Nonnull File getRootDir() {
        return new File(project.getBuildDir(), Integer.toString(number));
    }
/**
* Gets an object responsible for storing and retrieving build artifacts.
* If {@link #pickArtifactManager} has previously been called on this build,
* and a nondefault manager selected, that will be returned.
* Otherwise (including if we are loading a historical build created prior to this feature) {@link StandardArtifactManager} is used.
* <p>This method should be used when existing artifacts are to be loaded, displayed, or removed.
* If adding artifacts, use {@link #pickArtifactManager} instead.
* @return an appropriate artifact manager
* @since 1.532
*/
public final @Nonnull ArtifactManager getArtifactManager() {
return artifactManager != null ? artifactManager : new StandardArtifactManager(this);
}
/**
* Selects an object responsible for storing and retrieving build artifacts.
* The first time this is called on a running build, {@link ArtifactManagerConfiguration} is checked
* to see if one will handle this build.
* If so, that manager is saved in the build and it will be used henceforth.
* If no manager claimed the build, {@link StandardArtifactManager} is used.
* <p>This method should be used when a build step expects to archive some artifacts.
* If only displaying existing artifacts, use {@link #getArtifactManager} instead.
* @return an appropriate artifact manager
* @throws IOException if a custom manager was selected but the selection could not be saved
* @since 1.532
*/
    public final synchronized @Nonnull ArtifactManager pickArtifactManager() throws IOException {
        if (artifactManager != null) {
            // a manager was already chosen (and persisted) for this build
            return artifactManager;
        } else {
            // the first factory to claim this build wins, and the choice is saved for later loads
            for (ArtifactManagerFactory f : ArtifactManagerConfiguration.get().getArtifactManagerFactories()) {
                ArtifactManager mgr = f.managerFor(this);
                if (mgr != null) {
                    artifactManager = mgr;
                    save();
                    return mgr;
                }
            }
            // no factory claimed it; the default choice is deliberately not persisted
            return new StandardArtifactManager(this);
        }
    }
/**
* Gets the directory where the artifacts are archived.
* @deprecated Should only be used from {@link StandardArtifactManager} or subclasses.
*/
    @Deprecated
    public File getArtifactsDir() {
        // archive/ under the build's root directory
        return new File(getRootDir(),"archive");
    }
    /**
     * Gets the artifacts (relative to {@link #getArtifactsDir()}.
     * @return The list can be empty but never null
     */
    @Exported
    public @Nonnull List<Artifact> getArtifacts() {
        return getArtifactsUpTo(Integer.MAX_VALUE);
    }
/**
* Gets the first N artifacts.
* @return The list can be empty but never null
*/
    public @Nonnull List<Artifact> getArtifactsUpTo(int artifactsNumber) {
        ArtifactList r = new ArtifactList();
        try {
            addArtifacts(getArtifactManager().root(), "", "", r, null, artifactsNumber);
        } catch (IOException x) {
            // I/O trouble degrades to an empty/partial list rather than failing the caller
            LOGGER.log(Level.WARNING, null, x);
        }
        r.computeDisplayName();
        return r;
    }
    /**
     * Check if the {@link Run} contains artifacts.
     * The strange method name is so that we can access it from EL.
     * @return true if this run has any artifacts
     */
    public boolean getHasArtifacts() {
        // asking for a single artifact avoids walking the whole archive
        return !getArtifactsUpTo(1).isEmpty();
    }
private int addArtifacts(@Nonnull VirtualFile dir,
@Nonnull String path, @Nonnull String pathHref,
@Nonnull ArtifactList r, @Nonnull Artifact parent, int upTo) throws IOException {
VirtualFile[] kids = dir.list();
Arrays.sort(kids);
int n = 0;
for (VirtualFile sub : kids) {
String child = sub.getName();
String childPath = path + child;
String childHref = pathHref + Util.rawEncode(child);
String length = sub.isFile() ? String.valueOf(sub.length()) : "";
boolean collapsed = (kids.length==1 && parent!=null);
Artifact a;
if (collapsed) {
// Collapse single items into parent node where possible:
a = new Artifact(parent.getFileName() + '/' + child, childPath,
sub.isDirectory() ? null : childHref, length,
parent.getTreeNodeId());
r.tree.put(a, r.tree.remove(parent));
} else {
// Use null href for a directory:
a = new Artifact(child, childPath,
sub.isDirectory() ? null : childHref, length,
"n" + ++r.idSeq);
r.tree.put(a, parent!=null ? parent.getTreeNodeId() : null);
}
if (sub.isDirectory()) {
n += addArtifacts(sub, childPath + '/', childHref + '/', r, a, upTo-n);
if (n>=upTo) break;
} else {
// Don't store collapsed path in ArrayList (for correct data in external API)
r.add(collapsed ? new Artifact(child, a.relativePath, a.href, length, a.treeNodeId) : a);
if (++n>=upTo) break;
}
}
return n;
}
    /**
     * Maximum number of artifacts to list before using switching to the tree view.
     * Overridable via the {@code hudson.model.Run.ArtifactList.listCutoff} system property.
     */
    public static final int LIST_CUTOFF = Integer.parseInt(SystemProperties.getString("hudson.model.Run.ArtifactList.listCutoff", "16"));
    /**
     * Maximum number of artifacts to show in tree view before just showing a link.
     * Overridable via the {@code hudson.model.Run.ArtifactList.treeCutoff} system property.
     */
    public static final int TREE_CUTOFF = Integer.parseInt(SystemProperties.getString("hudson.model.Run.ArtifactList.treeCutoff", "40"));
    // ..and then "too many"
    public final class ArtifactList extends ArrayList<Artifact> {
        private static final long serialVersionUID = 1L;
        /**
         * Map of Artifact to treeNodeId of parent node in tree view.
         * Contains Artifact objects for directories and files (the ArrayList contains only files).
         */
        private LinkedHashMap<Artifact,String> tree = new LinkedHashMap<Artifact,String>();
        private int idSeq = 0;
        public Map<Artifact,String> getTree() {
            return tree;
        }
        /**
         * Computes the shortest unambiguous display path for every artifact:
         * starting from just the file name, path segments are added from the right
         * until no two artifacts render the same, or the maximum depth is reached.
         */
        public void computeDisplayName() {
            if(size()>LIST_CUTOFF) return; // we are not going to display file names, so no point in computing this
            int maxDepth = 0;
            int[] len = new int[size()];  // len[i] = number of trailing path tokens currently shown for artifact i
            String[][] tokens = new String[size()][];
            for( int i=0; i<tokens.length; i++ ) {
                tokens[i] = get(i).relativePath.split("[\\\\/]+");
                maxDepth = Math.max(maxDepth,tokens[i].length);
                len[i] = 1;
            }
            boolean collision;
            int depth=0;
            do {
                collision = false;
                Map<String,Integer/*index*/> names = new HashMap<String,Integer>();
                for (int i = 0; i < tokens.length; i++) {
                    String[] token = tokens[i];
                    String displayName = combineLast(token,len[i]);
                    Integer j = names.put(displayName, i);
                    if(j!=null) {
                        // two artifacts currently render identically; lengthen both and retry
                        collision = true;
                        if(j>=0)
                            len[j]++;
                        len[i]++;
                        names.put(displayName,-1); // occupy this name but don't let len[i] incremented with additional collisions
                    }
                }
            } while(collision && depth++<maxDepth);
            for (int i = 0; i < tokens.length; i++)
                get(i).displayPath = combineLast(tokens[i],len[i]);
        }
        /**
         * Combines last N token into the "a/b/c" form.
         */
        private String combineLast(String[] token, int n) {
            StringBuilder buf = new StringBuilder();
            for( int i=Math.max(0,token.length-n); i<token.length; i++ ) {
                if(buf.length()>0) buf.append('/');
                buf.append(token[i]);
            }
            return buf.toString();
        }
    }
/**
 * A build artifact.
 */
@ExportedBean
public class Artifact {
    /**
     * Relative path name from artifacts root.
     */
    @Exported(visibility=3)
    public final String relativePath;
    /**
     * Truncated form of {@link #relativePath} just enough
     * to disambiguate {@link Artifact}s.
     */
    /*package*/ String displayPath;
    /**
     * The filename of the artifact.
     * (though when directories with single items are collapsed for tree view, name may
     * include multiple path components, like "dist/pkg/mypkg")
     */
    private String name;
    /**
     * Properly encoded relativePath for use in URLs. This field is null for directories.
     */
    private String href;
    /**
     * Id of this node for use in tree view.
     */
    private String treeNodeId;
    /**
     * Length of this artifact for files, as a decimal string; empty string for directories.
     */
    private String length;
    /*package for test*/ Artifact(String name, String relativePath, String href, String len, String treeNodeId) {
        this.name = name;
        this.relativePath = relativePath;
        this.href = href;
        this.treeNodeId = treeNodeId;
        this.length = len;
    }
    /**
     * Gets the artifact file.
     * @deprecated May not be meaningful with custom artifact managers. Use {@link ArtifactManager#root} plus {@link VirtualFile#child} with {@link #relativePath} instead.
     */
    @Deprecated
    public @Nonnull File getFile() {
        return new File(getArtifactsDir(),relativePath);
    }
    /**
     * Returns just the file name portion, without the path.
     */
    @Exported(visibility=3)
    public String getFileName() {
        return name;
    }
    /** Disambiguated display form of the path; see {@link ArtifactList#computeDisplayName}. */
    @Exported(visibility=3)
    public String getDisplayPath() {
        return displayPath;
    }
    /** URL-encoded relative path; null for directory entries. */
    public String getHref() {
        return href;
    }
    /** Raw length string ("" for directories). */
    public String getLength() {
        return length;
    }
    /**
     * File size in bytes.
     * NOTE(review): {@link #length} is "" for directory entries, so Long.decode
     * would throw NumberFormatException there — only call this on file artifacts.
     */
    public long getFileSize(){
        return Long.decode(length);
    }
    /** Id of this node in the tree view. */
    public String getTreeNodeId() {
        return treeNodeId;
    }
    @Override
    public String toString() {
        return relativePath;
    }
}
/**
 * Returns the file holding this build's log.
 * Prefers the plain {@code log} file; falls back to the gzip-compressed
 * {@code log.gz} if only that exists; if neither exists, returns the
 * (nonexistent) plain file so callers get a consistent path.
 * @return may reference either an uncompressed or a compressed log
 */
public @Nonnull File getLogFile() {
    final File dir = getRootDir();
    final File plain = new File(dir, "log");
    if (plain.isFile()) {
        return plain;
    }
    final File compressed = new File(dir, "log.gz");
    return compressed.isFile() ? compressed : plain;
}
/**
 * Opens an input stream reading this build's log, transparently decompressing
 * a gzip-compressed {@code log.gz} if that is what {@link #getLogFile} found.
 *
 * @return a stream over the log contents; if no log file exists, a stream
 *         over an error message instead.
 * @throws IOException if the file cannot be opened
 * @since 1.349
 */
public @Nonnull InputStream getLogInputStream() throws IOException {
    final File logFile = getLogFile();
    if (!logFile.exists()) {
        final String message = "No such file: " + logFile;
        final byte[] raw = charset != null ? message.getBytes(charset) : message.getBytes();
        return new ByteArrayInputStream(raw);
    }
    final InputStream stream;
    try {
        stream = Files.newInputStream(logFile.toPath());
    } catch (InvalidPathException e) {
        throw new IOException(e);
    }
    // A ".gz" suffix means the log was compressed by log rotation.
    return logFile.getName().endsWith(".gz") ? new GZIPInputStream(stream) : stream;
}
/**
 * Opens a character reader over the build log, using this build's charset
 * when one is recorded and the platform default otherwise.
 */
public @Nonnull Reader getLogReader() throws IOException {
    InputStream log = getLogInputStream();
    return charset == null ? new InputStreamReader(log) : new InputStreamReader(log, charset);
}
/**
 * Used from <tt>console.jelly</tt> to write annotated log to the given output.
 *
 * @since 1.349
 */
public void writeLogTo(long offset, @Nonnull XMLOutput out) throws IOException {
    try {
        getLogText().writeHtmlTo(offset,out.asWriter());
    } catch (IOException e) {
        // try to fall back to the old getLogInputStream()
        // mainly to support .gz compressed files
        // In this case, console annotation handling will be turned off.
        // Note: the fallback copies from offset 0, not from "offset".
        try (InputStream input = getLogInputStream()) {
            IOUtils.copy(input, out.asWriter());
        }
    }
}
/**
 * Writes the complete log from the start to finish to the {@link OutputStream}.
 *
 * If the build is still running, this method keeps copying newly written log
 * output (polling once per second to avoid hammering the log file) and does
 * not return until the log is complete.
 * <p>
 * The method does not close the {@link OutputStream}.
 */
public void writeWholeLogTo(@Nonnull OutputStream out) throws IOException, InterruptedException {
    long position = 0;
    for (AnnotatedLargeText logText = getLogText();; logText = getLogText()) {
        position = logText.writeLogTo(position, out);
        if (logText.isComplete()) {
            return;
        }
        // Poll once per second rather than spinning on the log file.
        Thread.sleep(1000);
    }
}
/**
 * Used to URL-bind {@link AnnotatedLargeText}.
 * @return this run's log wrapped with console annotations; marked complete
 *         once the log is no longer being updated
 */
public @Nonnull AnnotatedLargeText getLogText() {
    File log = getLogFile();
    Charset cs = getCharset();
    boolean completed = !isLogUpdated();
    return new AnnotatedLargeText(log, cs, completed, this);
}
/**
 * Builds the search index for this run: the console and changes pages plus
 * the URL of every visible action.
 */
@Override
protected @Nonnull SearchIndexBuilder makeSearchIndex() {
    SearchIndexBuilder index = super.makeSearchIndex();
    index.add("console");
    index.add("changes");
    for (Action action : getAllActions()) {
        if (action.getIconFileName() == null) {
            continue; // actions without an icon are not shown, so don't index them
        }
        index.add(action.getUrlName());
    }
    return index;
}
/**
 * Exposes this run to the remote REST API.
 */
public @Nonnull Api getApi() {
    return new Api(this);
}
/**
 * Checks the given permission against this run's ACL, throwing if denied.
 */
@Override
public void checkPermission(@Nonnull Permission p) {
    getACL().checkPermission(p);
}
/**
 * Returns whether the current authentication holds the given permission on this run.
 */
@Override
public boolean hasPermission(@Nonnull Permission p) {
    return getACL().hasPermission(p);
}
@Override
public ACL getACL() {
    // for now, don't maintain ACL per run, and do it at project level
    return getParent().getACL();
}
/**
 * Deletes this build's artifacts.
 *
 * @throws IOException
 *      if we fail to delete.
 *
 * @since 1.350
 */
public synchronized void deleteArtifacts() throws IOException {
    try {
        getArtifactManager().delete();
    } catch (InterruptedException x) {
        // Restore the thread's interrupt status so callers further up the stack
        // can still observe the interruption; the failure itself is surfaced as IOException.
        Thread.currentThread().interrupt();
        throw new IOException(x);
    }
}
/**
 * Deletes this build and its entire log
 *
 * @throws IOException
 *      if we fail to delete.
 */
public void delete() throws IOException {
    File rootDir = getRootDir();
    if (!rootDir.isDirectory()) {
        throw new IOException(this + ": " + rootDir + " looks to have already been deleted; siblings: " + Arrays.toString(project.getBuildDir().list()));
    }
    RunListener.fireDeleted(this);
    synchronized (this) { // avoid holding a lock while calling plugin impls of onDeleted
        // Rename the build dir to a dot-prefixed temp name first, then delete the
        // temp dir, so the removal appears near-atomic to other observers.
        File tmp = new File(rootDir.getParentFile(),'.'+rootDir.getName());
        if (tmp.exists()) {
            Util.deleteRecursive(tmp); // leftover from an earlier, failed deletion
        }
        // TODO on Java 7 prefer: Files.move(rootDir.toPath(), tmp.toPath(), StandardCopyOption.ATOMIC_MOVE)
        boolean renamingSucceeded = rootDir.renameTo(tmp);
        Util.deleteRecursive(tmp);
        // some user reported that they see some left-over .xyz files in the workspace,
        // so just to make sure we've really deleted it, schedule the deletion on VM exit, too.
        if(tmp.exists())
            tmp.deleteOnExit();
        if(!renamingSucceeded)
            throw new IOException(rootDir+" is in use");
        LOGGER.log(FINE, "{0}: {1} successfully deleted", new Object[] {this, rootDir});
        removeRunFromParent();
    }
}
// Detaches this run from its parent job's build list after deletion.
@SuppressWarnings("unchecked") // seems this is too clever for Java's type system?
private void removeRunFromParent() {
    getParent().removeRun((RunT)this);
}
/**
 * Records that the currently executing build (if any) has reached the given checkpoint.
 * No-op when invoked outside a build thread.
 * @see CheckPoint#report()
 */
/*package*/ static void reportCheckpoint(@Nonnull CheckPoint id) {
    Run<?,?>.RunExecution exec = RunnerStack.INSTANCE.peek();
    if (exec != null) {
        exec.checkpoints.report(id);
    }
}
/**
 * Blocks until the previous in-progress build (if any) reaches the given
 * checkpoint, or finishes without reaching it.
 * @see CheckPoint#block()
 */
/*package*/ static void waitForCheckpoint(@Nonnull CheckPoint id, @CheckForNull BuildListener listener, @CheckForNull String waiter) throws InterruptedException {
    while(true) {
        Run<?,?>.RunExecution exec = RunnerStack.INSTANCE.peek();
        if (exec == null) {
            return; // not running inside a build; nothing to wait for
        }
        Run b = exec.getBuild().getPreviousBuildInProgress();
        if(b==null) return; // no pending earlier build
        Run.RunExecution runner = b.runner;
        if(runner==null) {
            // polled at the wrong moment. try again.
            Thread.sleep(0); // yield before re-polling
            continue;
        }
        if(runner.checkpoints.waitForCheckPoint(id, listener, waiter))
            return; // confirmed that the previous build reached the check point
        // the previous build finished without ever reaching the check point. try again.
    }
}
/**
 * @deprecated as of 1.467
 *      Please use {@link RunExecution}
 */
@Deprecated
protected abstract class Runner extends RunExecution {}
/**
 * Object that lives while the build is executed, to keep track of things that
 * are needed only during the build.
 */
public abstract class RunExecution {
    /**
     * Keeps track of the check points attained by a build, and abstracts away the synchronization needed to
     * maintain this data structure.
     */
    private final class CheckpointSet {
        /**
         * Stages of the builds that this runner has completed. This is used for concurrent {@link RunExecution}s to
         * coordinate and serialize their executions where necessary.
         */
        private final Set<CheckPoint> checkpoints = new HashSet<CheckPoint>();
        // Set once the build completes; wakes up all waiters unconditionally.
        private boolean allDone;
        protected synchronized void report(@Nonnull CheckPoint identifier) {
            checkpoints.add(identifier);
            notifyAll(); // wake up any builds blocked in waitForCheckPoint
        }
        /**
         * Blocks until {@code identifier} is reported or the build completes.
         * @return true if the checkpoint was actually reached; false if the build
         *         finished without ever reaching it
         */
        protected synchronized boolean waitForCheckPoint(@Nonnull CheckPoint identifier, @CheckForNull BuildListener listener, @CheckForNull String waiter) throws InterruptedException {
            final Thread t = Thread.currentThread();
            final String oldName = t.getName();
            // Temporarily rename the thread so thread dumps show what we are waiting on.
            t.setName(oldName + " : waiting for " + identifier + " on " + getFullDisplayName() + " from " + waiter);
            try {
                boolean first = true;
                while (!allDone && !checkpoints.contains(identifier)) {
                    if (first && listener != null && waiter != null) {
                        // log the wait only once, on first iteration
                        listener.getLogger().println(Messages.Run__is_waiting_for_a_checkpoint_on_(waiter, getFullDisplayName()));
                    }
                    wait();
                    first = false;
                }
                return checkpoints.contains(identifier);
            } finally {
                t.setName(oldName);
            }
        }
        /**
         * Notifies that the build is fully completed and all the checkpoint locks be released.
         */
        private synchronized void allDone() {
            allDone = true;
            notifyAll();
        }
    }
    private final CheckpointSet checkpoints = new CheckpointSet();
    // Per-build scratch space exposed via getAttributes(); never persisted.
    private final Map<Object,Object> attributes = new HashMap<Object, Object>();
    /**
     * Performs the main build and returns the status code.
     *
     * @throws Exception
     *      exception will be recorded and the build will be considered a failure.
     */
    public abstract @Nonnull Result run(@Nonnull BuildListener listener ) throws Exception, RunnerAbortedException;
    /**
     * Performs the post-build action.
     * <p>
     * This method is called after {@linkplain #run(BuildListener) the main portion of the build is completed.}
     * This is a good opportunity to do notifications based on the result
     * of the build. When this method is called, the build is not really
     * finalized yet, and the build is still considered in progress --- for example,
     * even if the build is successful, this build still won't be picked up
     * by {@link Job#getLastSuccessfulBuild()}.
     */
    public abstract void post(@Nonnull BuildListener listener ) throws Exception;
    /**
     * Performs final clean up action.
     * <p>
     * This method is called after {@link #post(BuildListener)},
     * after the build result is fully finalized. This is the point
     * where the build is already considered completed.
     * <p>
     * Among other things, this is often a necessary pre-condition
     * before invoking other builds that depend on this build.
     */
    public abstract void cleanUp(@Nonnull BuildListener listener) throws Exception;
    /** Returns the run that this execution belongs to. */
    public @Nonnull RunT getBuild() {
        return _this();
    }
    /** Returns the job that owns this execution's run. */
    public @Nonnull JobT getProject() {
        return _this().getParent();
    }
    /**
     * Bag of stuff to allow plugins to store state for the duration of a build
     * without persisting it.
     *
     * @since 1.473
     */
    public @Nonnull Map<Object,Object> getAttributes() {
        return attributes;
    }
}
/**
 * Used in {@link Run.RunExecution#run} to indicates that a fatal error in a build
 * is reported to {@link BuildListener} and the build should be simply aborted
 * without further recording a stack trace.
 */
public static final class RunnerAbortedException extends RuntimeException {
    private static final long serialVersionUID = 1L;
}
/**
 * @deprecated as of 1.467
 *      Use {@link #execute(hudson.model.Run.RunExecution)}
 */
@Deprecated
protected final void run(@Nonnull Runner job) {
    execute(job);
}
/**
 * Drives a build through its full lifecycle: sets up the listener, runs the
 * main build, post-build steps, and clean-up, then finalizes the result and
 * state. Statement order here is significant; see inline comments.
 */
protected final void execute(@Nonnull RunExecution job) {
    if(result!=null)
        return; // already built.
    StreamBuildListener listener=null;
    runner = job;
    onStartBuilding();
    try {
        // to set the state to COMPLETE in the end, even if the thread dies abnormally.
        // otherwise the queue state becomes inconsistent
        long start = System.currentTimeMillis();
        try {
            try {
                Computer computer = Computer.currentComputer();
                Charset charset = null;
                if (computer != null) {
                    charset = computer.getDefaultCharset();
                    this.charset = charset.name(); // record the node's charset on the run
                }
                listener = createBuildListener(job, listener, charset);
                listener.started(getCauses());
                // Log who triggered the build when it is a real (non-system, non-anonymous) user.
                Authentication auth = Jenkins.getAuthentication();
                if (!auth.equals(ACL.SYSTEM)) {
                    String name = auth.getName();
                    if (!auth.equals(Jenkins.ANONYMOUS)) {
                        name = ModelHyperlinkNote.encodeTo(User.get(name));
                    }
                    listener.getLogger().println(Messages.Run_running_as_(name));
                }
                RunListener.fireStarted(this,listener);
                updateSymlinks(listener);
                setResult(job.run(listener));
                LOGGER.log(INFO, "{0} main build action completed: {1}", new Object[] {this, result});
                CheckPoint.MAIN_COMPLETED.report();
            } catch (ThreadDeath t) {
                throw t; // never swallow ThreadDeath
            } catch( AbortException e ) {// orderly abortion.
                result = Result.FAILURE;
                listener.error(e.getMessage());
                LOGGER.log(FINE, "Build "+this+" aborted",e);
            } catch( RunnerAbortedException e ) {// orderly abortion.
                result = Result.FAILURE;
                LOGGER.log(FINE, "Build "+this+" aborted",e);
            } catch( InterruptedException e) {
                // aborted
                result = Executor.currentExecutor().abortResult();
                listener.getLogger().println(Messages.Run_BuildAborted());
                Executor.currentExecutor().recordCauseOfInterruption(Run.this,listener);
                LOGGER.log(Level.INFO, this + " aborted", e);
            } catch( Throwable e ) {
                // NOTE: listener may still be null here if createBuildListener failed.
                handleFatalBuildProblem(listener,e);
                result = Result.FAILURE;
            }
            // even if the main build fails fatally, try to run post build processing
            job.post(listener);
        } catch (ThreadDeath t) {
            throw t;
        } catch( Throwable e ) {
            handleFatalBuildProblem(listener,e);
            result = Result.FAILURE;
        } finally {
            long end = System.currentTimeMillis();
            duration = Math.max(end - start, 0); // @see HUDSON-5844
            // advance the state.
            // the significance of doing this is that Jenkins
            // will now see this build as completed.
            // things like triggering other builds requires this as pre-condition.
            // see issue #980.
            LOGGER.log(FINER, "moving into POST_PRODUCTION on {0}", this);
            state = State.POST_PRODUCTION;
            if (listener != null) {
                RunListener.fireCompleted(this,listener);
                try {
                    job.cleanUp(listener);
                } catch (Exception e) {
                    handleFatalBuildProblem(listener,e);
                    // too late to update the result now
                }
                listener.finished(result);
                listener.closeQuietly();
            }
            try {
                save();
            } catch (IOException e) {
                LOGGER.log(Level.SEVERE, "Failed to save build record",e);
            }
        }
        try {
            getParent().logRotate();
        } catch (Exception e) {
            LOGGER.log(Level.SEVERE, "Failed to rotate log",e);
        }
    } finally {
        onEndBuilding();
    }
}
/**
 * Opens the build log file for appending and wraps it in a
 * {@link StreamBuildListener}, applying global {@link ConsoleLogFilter}s first
 * and then project-specific {@link BuildWrapper} log decorators.
 */
private StreamBuildListener createBuildListener(@Nonnull RunExecution job, StreamBuildListener listener, Charset charset) throws IOException, InterruptedException {
    // don't do buffering so that what's written to the listener
    // gets reflected to the file immediately, which can then be
    // served to the browser immediately
    OutputStream logger;
    try {
        logger = Files.newOutputStream(getLogFile().toPath(), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    } catch (InvalidPathException e) {
        throw new IOException(e);
    }
    RunT build = job.getBuild();
    // Global log filters
    for (ConsoleLogFilter filter : ConsoleLogFilter.all()) {
        logger = filter.decorateLogger(build, logger);
    }
    // Project specific log filters
    if (project instanceof BuildableItemWithBuildWrappers && build instanceof AbstractBuild) {
        BuildableItemWithBuildWrappers biwbw = (BuildableItemWithBuildWrappers) project;
        for (BuildWrapper bw : biwbw.getBuildWrappersList()) {
            logger = bw.decorateLogger((AbstractBuild) build, logger);
        }
    }
    listener = new StreamBuildListener(logger,charset);
    return listener;
}
/**
 * Makes sure that {@code lastSuccessful} and {@code lastStable} legacy links in the project’s root directory exist.
 * Normally you do not need to call this explicitly, since {@link #execute} does so,
 * but this may be needed if you are creating synthetic {@link Run}s as part of a container project (such as Maven builds in a module set).
 * You should also ensure that {@link RunListener#fireStarted} and {@link RunListener#fireCompleted} are called.
 * @param listener probably unused
 * @throws InterruptedException probably not thrown
 * @since 1.530
 */
public final void updateSymlinks(@Nonnull TaskListener listener) throws InterruptedException {
    createSymlink(listener, "lastSuccessful", PermalinkProjectAction.Permalink.LAST_SUCCESSFUL_BUILD);
    createSymlink(listener, "lastStable", PermalinkProjectAction.Permalink.LAST_STABLE_BUILD);
}
/**
 * Backward compatibility: creates a {@code name} symlink in the project root
 * that resolves to the permalink maintained by {@link PeepholePermalink}.
 * We used to have $JENKINS_HOME/jobs/JOBNAME/lastStable and lastSuccessful symlinked directly
 * to the appropriate builds; those are now done in {@link PeepholePermalink}.
 */
private void createSymlink(@Nonnull TaskListener listener, @Nonnull String name, @Nonnull PermalinkProjectAction.Permalink target) throws InterruptedException {
    final File rootDir = getParent().getRootDir();
    final File buildDir = getParent().getBuildDir();
    // With the default layout the link can be relative ("builds/..."); with a
    // custom build directory it must reference the absolute location.
    final boolean defaultLayout = buildDir.equals(new File(rootDir, "builds"));
    final String prefix = defaultLayout ? "builds" : buildDir.toString();
    Util.createSymlink(rootDir, prefix + File.separator + target.getId(), name, listener);
}
/**
 * Handles a fatal build problem (exception) that occurred during the build.
 * <p>
 * The listener is legitimately null when the failure happened before the build
 * log could be set up (see the catch blocks in {@link #execute}, where
 * {@code listener} starts out null); hence {@code @CheckForNull} — the previous
 * {@code @Nonnull} annotation contradicted both the callers and the null check below.
 */
private void handleFatalBuildProblem(@CheckForNull BuildListener listener, @Nonnull Throwable e) {
    if(listener!=null) {
        LOGGER.log(FINE, getDisplayName()+" failed to build",e);
        if(e instanceof IOException)
            Util.displayIOException((IOException)e,listener);
        Functions.printStackTrace(e, listener.fatalError(e.getMessage()));
    } else {
        LOGGER.log(SEVERE, getDisplayName()+" failed to build and we don't even have a listener",e);
    }
}
/**
 * Called when a job started building.
 */
protected void onStartBuilding() {
    LOGGER.log(FINER, "moving to BUILDING on {0}", this);
    state = State.BUILDING;
    startTime = System.currentTimeMillis();
    if (runner!=null)
        RunnerStack.INSTANCE.push(runner); // make this execution visible to checkpoint machinery
    RunListener.fireInitialize(this);
}
/**
 * Called when a job finished building normally or abnormally.
 */
protected void onEndBuilding() {
    // signal that we've finished building.
    state = State.COMPLETED;
    LOGGER.log(FINER, "moving to COMPLETED on {0}", this);
    if (runner!=null) {
        // MavenBuilds may be created without their corresponding runners.
        runner.checkpoints.allDone(); // release any builds waiting on our checkpoints
        runner = null;
        RunnerStack.INSTANCE.pop();
    }
    if (result == null) {
        result = Result.FAILURE;
        LOGGER.log(WARNING, "{0}: No build result is set, so marking as failure. This should not happen.", this);
    }
    RunListener.fireFinalized(this);
}
/**
 * Save the settings to a file.
 */
public synchronized void save() throws IOException {
    if(BulkChange.contains(this)) return; // deferred; BulkChange will save on commit
    getDataFile().write(this);
    SaveableListener.fireOnChange(this, getDataFile());
}
/** The build.xml file under the run's root directory that persists this run. */
private @Nonnull XmlFile getDataFile() {
    return new XmlFile(XSTREAM,new File(getRootDir(),"build.xml"));
}
/**
 * Gets the log of the build as a string.
 * @return Returns the log or an empty string if it has not been found
 * @deprecated since 2007-11-11.
 *     Use {@link #getLog(int)} instead as it avoids loading
 *     the whole log into memory unnecessarily.
 */
@Deprecated
public @Nonnull String getLog() throws IOException {
    return Util.loadFile(getLogFile(),getCharset());
}
/**
 * Gets the log of the build as a list of strings (one per log line).
 * The number of lines returned is constrained by the maxLines parameter.
 *
 * Implementation note: the file is read backwards byte by byte so that only
 * the last {@code maxLines} lines are ever materialized.
 *
 * @param maxLines The maximum number of log lines to return. If the log
 * is bigger than this, only the most recent lines are returned.
 * @return A list of log lines. Will have no more than maxLines elements.
 * @throws IOException If there is a problem reading the log file.
 */
public @Nonnull List<String> getLog(int maxLines) throws IOException {
    if (maxLines == 0) {
        return Collections.emptyList();
    }
    int lines = 0;
    long filePointer;
    final List<String> lastLines = new ArrayList<>(Math.min(maxLines, 128));
    final List<Byte> bytes = new ArrayList<>(); // current line's bytes, in reverse order
    try (RandomAccessFile fileHandler = new RandomAccessFile(getLogFile(), "r")) {
        long fileLength = fileHandler.length() - 1;
        // Walk backwards from the end of the file.
        for (filePointer = fileLength; filePointer != -1 && maxLines != lines; filePointer--) {
            fileHandler.seek(filePointer);
            byte readByte = fileHandler.readByte();
            if (readByte == 0x0A) { // LF: line boundary
                if (filePointer < fileLength) { // skip the file's trailing newline
                    lines = lines + 1;
                    lastLines.add(convertBytesToString(bytes));
                    bytes.clear();
                }
            } else if (readByte != 0xD) { // drop CR so CRLF logs decode cleanly
                bytes.add(readByte);
            }
        }
    }
    if (lines != maxLines) {
        // we hit the start of the file; flush the partially accumulated first line
        lastLines.add(convertBytesToString(bytes));
    }
    Collections.reverse(lastLines); // we collected newest-first; callers expect oldest-first
    // If the log has been truncated, include that information.
    // Use set (replaces the first element) rather than add so that
    // the list doesn't grow beyond the specified maximum number of lines.
    if (lines == maxLines) {
        lastLines.set(0, "[...truncated " + Functions.humanReadableByteSize(filePointer)+ "...]");
    }
    return ConsoleNote.removeNotes(lastLines);
}
/**
 * Decodes a line whose bytes were accumulated in reverse order by {@link #getLog(int)}.
 * <p>
 * Fills a primitive array back-to-front instead of reversing the list and boxing
 * through {@code Byte[]}/{@code ArrayUtils.toPrimitive}; this also leaves the
 * caller's list untouched (the old version reversed it in place as a side effect).
 */
private String convertBytesToString(List<Byte> bytes) {
    int n = bytes.size();
    byte[] raw = new byte[n];
    for (int i = 0; i < n; i++) {
        raw[n - 1 - i] = bytes.get(i);
    }
    return new String(raw, getCharset());
}
/** Redirects to the 48x48 status icon image for this build. */
public void doBuildStatus( StaplerRequest req, StaplerResponse rsp ) throws IOException {
    rsp.sendRedirect2(req.getContextPath()+"/images/48x48/"+getBuildStatusUrl());
}
/** Image file name of the ball icon matching this build's current color. */
public @Nonnull String getBuildStatusUrl() {
    return getIconColor().getImage();
}
/** CSS icon class name matching this build's current color. */
public String getBuildStatusIconClassName() {
    return getIconColor().getIconClassName();
}
/**
 * One-line summary of a build's status relative to its predecessor.
 */
public static class Summary {
    /**
     * Is this build worse or better, compared to the previous build?
     */
    public boolean isWorse;
    // Localized human-readable summary text.
    public String message;
    public Summary(boolean worse, String message) {
        this.isWorse = worse;
        this.message = message;
    }
}
/**
 * Used to implement {@link #getBuildStatusSummary}.
 * Extension point allowing plugins to override the built-in summary text.
 * @since 1.575
 */
public static abstract class StatusSummarizer implements ExtensionPoint {
    /**
     * Possibly summarizes the reasons for a build’s status.
     * @param run a completed build
     * @param trend the result of {@link ResultTrend#getResultTrend(hudson.model.Run)} on {@code run} (precomputed for efficiency)
     * @return a summary, or null to fall back to other summarizers or built-in behavior
     */
    public abstract @CheckForNull Summary summarize(@Nonnull Run<?,?> run, @Nonnull ResultTrend trend);
}
/**
 * Gets an object which represents the single line summary of the status of this build
 * (especially in comparison with the previous build.)
 * Plugins registered via {@link StatusSummarizer} get first chance; otherwise a
 * built-in message is chosen from the {@link ResultTrend}.
 * @see StatusSummarizer
 */
public @Nonnull Summary getBuildStatusSummary() {
    if (isBuilding()) {
        return new Summary(false, Messages.Run_Summary_Unknown());
    }
    ResultTrend trend = ResultTrend.getResultTrend(this);
    for (StatusSummarizer summarizer : ExtensionList.lookup(StatusSummarizer.class)) {
        Summary summary = summarizer.summarize(this, trend);
        if (summary != null) {
            return summary; // first summarizer to answer wins
        }
    }
    switch (trend) {
        case ABORTED : return new Summary(false, Messages.Run_Summary_Aborted());
        case NOT_BUILT : return new Summary(false, Messages.Run_Summary_NotBuilt());
        case FAILURE : return new Summary(true, Messages.Run_Summary_BrokenSinceThisBuild());
        case STILL_FAILING :
            RunT since = getPreviousNotFailedBuild();
            if(since==null)
                return new Summary(false, Messages.Run_Summary_BrokenForALongTime());
            RunT failedBuild = since.getNextBuild();
            return new Summary(false, Messages.Run_Summary_BrokenSince(failedBuild.getDisplayName()));
        case NOW_UNSTABLE:
            // intentional fall-through: newly-unstable uses the same message as still-unstable
        case STILL_UNSTABLE :
            return new Summary(false, Messages.Run_Summary_Unstable());
        case UNSTABLE :
            return new Summary(true, Messages.Run_Summary_Unstable());
        case SUCCESS :
            return new Summary(false, Messages.Run_Summary_Stable());
        case FIXED :
            return new Summary(false, Messages.Run_Summary_BackToNormal());
    }
    return new Summary(false, Messages.Run_Summary_Unknown());
}
/**
 * Serves the artifacts.
 * @throws AccessDeniedException Access denied
 */
public @Nonnull DirectoryBrowserSupport doArtifact() {
    if(Functions.isArtifactsPermissionEnabled()) {
        // artifacts permission is an opt-in feature; only enforce when enabled
        checkPermission(ARTIFACTS);
    }
    return new DirectoryBrowserSupport(this, getArtifactManager().root(), Messages.Run_ArtifactsBrowserTitle(project.getDisplayName(), getDisplayName()), "package.png", true);
}
/**
 * Returns the build number in the body.
 */
public void doBuildNumber(StaplerResponse rsp) throws IOException {
    rsp.setContentType("text/plain");
    rsp.setCharacterEncoding("US-ASCII");
    rsp.setStatus(HttpServletResponse.SC_OK);
    rsp.getWriter().print(number);
}
/**
 * Returns the build time stamp in the body, formatted either with the
 * caller-supplied {@link SimpleDateFormat} pattern (in the request locale)
 * or a short English date-time format when no pattern is given.
 */
public void doBuildTimestamp( StaplerRequest req, StaplerResponse rsp, @QueryParameter String format) throws IOException {
    rsp.setContentType("text/plain");
    rsp.setCharacterEncoding("US-ASCII");
    rsp.setStatus(HttpServletResponse.SC_OK);
    DateFormat df;
    if (format == null) {
        df = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT, Locale.ENGLISH);
    } else {
        df = new SimpleDateFormat(format, req.getLocale());
    }
    rsp.getWriter().print(df.format(getTime()));
}
/**
 * Sends out the raw console output, with console notes stripped and the
 * response compressed when the client supports it.
 * (Also removes a stray empty statement {@code ;} left after setContentType.)
 */
public void doConsoleText(StaplerRequest req, StaplerResponse rsp) throws IOException {
    rsp.setContentType("text/plain;charset=UTF-8");
    try (InputStream input = getLogInputStream();
         OutputStream os = rsp.getCompressedOutputStream(req);
         PlainTextConsoleOutputStream out = new PlainTextConsoleOutputStream(os)) {
        IOUtils.copy(input, out);
    }
}
/**
 * Handles incremental log output.
 * @deprecated as of 1.352
 *      Use {@code getLogText().doProgressiveText(req,rsp)}
 */
@Deprecated
public void doProgressiveLog( StaplerRequest req, StaplerResponse rsp) throws IOException {
    getLogText().doProgressText(req,rsp);
}
/**
 * Checks whether keep status can be toggled.
 * Normally it can, but if there is a complex reason (from subclasses) why this build
 * must be kept, the toggle is meaningless.
 * @return true if {@link #doToggleLogKeep} and {@link #keepLog(boolean)} and {@link #keepLog()} are options
 * @since 1.510
 */
public boolean canToggleLogKeep() {
    // Toggling is definitely pointless only when the flag is off yet the build
    // is kept anyway (i.e. something else forces keeping it).
    // TODO may be that keepLog is on (perhaps toggled earlier) yet isKeepLog() would be true anyway.
    // In such a case this will incorrectly return true and logKeep.jelly will allow the toggle.
    // However at least then (after redirecting to the same page) the toggle button will correctly disappear.
    return keepLog || !isKeepLog();
}
/** Flips the "keep this build forever" flag and returns to the referring page. */
@RequirePOST
public void doToggleLogKeep( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
    keepLog(!keepLog);
    rsp.forwardToPreviousPage(req);
}
/**
 * Marks this build to keep the log.
 */
@CLIMethod(name="keep-build")
public final void keepLog() throws IOException {
    keepLog(true);
}
/** Sets the keep-forever flag (requires UPDATE to set, DELETE to clear) and persists it. */
public void keepLog(boolean newValue) throws IOException {
    checkPermission(newValue ? UPDATE : DELETE);
    keepLog = newValue;
    save();
}
/**
 * Deletes the build when the button is pressed.
 */
@RequirePOST
public void doDoDelete( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
    checkPermission(DELETE);
    // We should not simply delete the build if it has been explicitly
    // marked to be preserved, or if the build should not be deleted
    // due to dependencies!
    String why = getWhyKeepLog();
    if (why!=null) {
        sendError(Messages.Run_UnableToDelete(getFullDisplayName(), why), req, rsp);
        return;
    }
    try{
        delete();
    }
    catch(IOException ex){
        // show the retry page with the stack trace rather than a bare error
        req.setAttribute("stackTraces", Functions.printThrowable(ex));
        req.getView(this, "delete-retry.jelly").forward(req, rsp);
        return;
    }
    rsp.sendRedirect2(req.getContextPath()+'/' + getParent().getUrl());
}
/** Updates the build description (requires UPDATE permission) and persists it. */
public void setDescription(String description) throws IOException {
    checkPermission(UPDATE);
    this.description = description;
    save();
}
/**
 * Accepts the new description.
 */
@RequirePOST
public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
    setDescription(req.getParameter("description"));
    rsp.sendRedirect("."); // go to the top page
}
/**
 * @deprecated as of 1.292
 *      Use {@link #getEnvironment(TaskListener)} instead.
 */
@Deprecated
public Map<String,String> getEnvVars() {
    LOGGER.log(WARNING, "deprecated call to Run.getEnvVars\n\tat {0}", new Throwable().getStackTrace()[1]);
    try {
        return getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
    } catch (IOException | InterruptedException e) {
        // Deprecated API stays lenient: fall back to an empty environment
        // rather than propagating. (Multi-catch replaces two identical catch bodies.)
        return new EnvVars();
    }
}
/**
 * @deprecated as of 1.305 use {@link #getEnvironment(TaskListener)}
 */
@Deprecated
public EnvVars getEnvironment() throws IOException, InterruptedException {
    LOGGER.log(WARNING, "deprecated call to Run.getEnvironment\n\tat {0}", new Throwable().getStackTrace()[1]);
    return getEnvironment(new LogTaskListener(LOGGER, Level.INFO));
}
/**
 * Returns the map that contains environmental variables to be used for launching
 * processes for this build.
 *
 * <p>
 * {@link hudson.tasks.BuildStep}s that invoke external processes should use this.
 * This allows {@link BuildWrapper}s and other project configurations (such as JDK selection)
 * to take effect.
 *
 * <p>
 * Unlike earlier {@link #getEnvVars()}, this map contains the whole environment,
 * not just the overrides, so one can introspect values to change its behavior.
 *
 * @return the map with the environmental variables.
 * @since 1.305
 */
public @Nonnull EnvVars getEnvironment(@Nonnull TaskListener listener) throws IOException, InterruptedException {
    Computer c = Computer.currentComputer();
    Node n = c==null ? null : c.getNode();
    EnvVars env = getParent().getEnvironment(n,listener);
    env.putAll(getCharacteristicEnvVars());
    // apply them in a reverse order so that higher ordinal ones can modify values added by lower ordinal ones
    for (EnvironmentContributor ec : EnvironmentContributor.all().reverseView())
        ec.buildEnvironmentFor(this,env,listener);
    return env;
}
/**
* Builds up the environment variable map that's sufficient to identify a process
* as ours. This is used to kill run-away processes via {@link ProcessTree#killAll(Map)}.
*/
    public @Nonnull final EnvVars getCharacteristicEnvVars() {
        // Start from the job-level characteristic variables and add build-specific entries.
        EnvVars env = getParent().getCharacteristicEnvVars();
        env.put("BUILD_NUMBER",String.valueOf(number));
        env.put("BUILD_ID",getId());
        // BUILD_TAG is unique per build across the instance; '/' in folder paths is flattened to '-'.
        env.put("BUILD_TAG","jenkins-"+getParent().getFullName().replace('/', '-')+"-"+number);
        return env;
    }
/**
* Produces an identifier for this run unique in the system.
* @return the {@link Job#getFullName}, then {@code #}, then {@link #getNumber}
* @see #fromExternalizableId
*/
    public @Nonnull String getExternalizableId() {
        // Format: "<full job name>#<build number>"; parsed back by fromExternalizableId.
        return project.getFullName() + "#" + getNumber();
    }
/**
* Tries to find a run from an persisted identifier.
* @param id as produced by {@link #getExternalizableId}
* @return the same run, or null if the job or run was not found
* @throws IllegalArgumentException if the ID is malformed
*/
public @CheckForNull static Run<?,?> fromExternalizableId(String id) throws IllegalArgumentException {
int hash = id.lastIndexOf('#');
if (hash <= 0) {
throw new IllegalArgumentException("Invalid id");
}
String jobName = id.substring(0, hash);
int number;
try {
number = Integer.parseInt(id.substring(hash + 1));
} catch (NumberFormatException x) {
throw new IllegalArgumentException(x);
}
Jenkins j = Jenkins.getInstance();
Job<?,?> job = j.getItemByFullName(jobName, Job.class);
if (job == null) {
return null;
}
return job.getBuildByNumber(number);
}
/**
* Returns the estimated duration for this run if it is currently running.
* Default to {@link Job#getEstimatedDuration()}, may be overridden in subclasses
* if duration may depend on run specific parameters (like incremental Maven builds).
*
* @return the estimated duration in milliseconds
* @since 1.383
*/
    @Exported
    public long getEstimatedDuration() {
        // Delegates to the owning job; subclasses may override with run-specific estimates.
        return project.getEstimatedDuration();
    }
    /**
     * Handles the configuration form submission for this run.
     * Requires {@link #UPDATE} permission; delegates field updates to {@link #submit(JSONObject)}.
     */
    @RequirePOST
    public @Nonnull HttpResponse doConfigSubmit( StaplerRequest req ) throws IOException, ServletException, FormException {
        checkPermission(UPDATE);
        // BulkChange batches persistence: submit() may set several fields but we save once on commit.
        try (BulkChange bc = new BulkChange(this)) {
            JSONObject json = req.getSubmittedForm();
            submit(json);
            bc.commit();
        }
        return FormApply.success(".");
    }
    /**
     * Applies the submitted configuration form values to this run.
     *
     * @param json the submitted form, as produced by {@code doConfigSubmit}
     * @throws IOException if persisting a value fails
     */
    protected void submit(JSONObject json) throws IOException {
        // An empty or whitespace-only display name is normalized to null (i.e. use the default).
        setDisplayName(Util.fixEmptyAndTrim(json.getString("displayName")));
        setDescription(json.getString("description"));
    }
public static final XStream XSTREAM = new XStream2();
/**
* Alias to {@link #XSTREAM} so that one can access additional methods on {@link XStream2} more easily.
*/
public static final XStream2 XSTREAM2 = (XStream2)XSTREAM;
static {
XSTREAM.alias("build",FreeStyleBuild.class);
XSTREAM.registerConverter(Result.conv);
}
private static final Logger LOGGER = Logger.getLogger(Run.class.getName());
/**
* Sort by date. Newer ones first.
*/
public static final Comparator<Run> ORDER_BY_DATE = new Comparator<Run>() {
public int compare(@Nonnull Run lhs, @Nonnull Run rhs) {
long lt = lhs.getTimeInMillis();
long rt = rhs.getTimeInMillis();
if(lt>rt) return -1;
if(lt<rt) return 1;
return 0;
}
};
/**
* {@link FeedAdapter} to produce feed from the summary of this build.
*/
public static final FeedAdapter<Run> FEED_ADAPTER = new DefaultFeedAdapter();
/**
* {@link FeedAdapter} to produce feeds to show one build per project.
*/
public static final FeedAdapter<Run> FEED_ADAPTER_LATEST = new DefaultFeedAdapter() {
/**
* The entry unique ID needs to be tied to a project, so that
* new builds will replace the old result.
*/
@Override
public String getEntryID(Run e) {
// can't use a meaningful year field unless we remember when the job was created.
return "tag:hudson.dev.java.net,2008:"+e.getParent().getAbsoluteUrl();
}
};
/**
* {@link BuildBadgeAction} that shows the logs are being kept.
*/
    public final class KeepLogBuildBadge implements BuildBadgeAction {
        // This badge contributes no icon/name/URL of its own; only getWhyKeepLog carries data.
        public @CheckForNull String getIconFileName() { return null; }
        public @CheckForNull String getDisplayName() { return null; }
        public @CheckForNull String getUrlName() { return null; }
        public @CheckForNull String getWhyKeepLog() { return Run.this.getWhyKeepLog(); }
    }
public static final PermissionGroup PERMISSIONS = new PermissionGroup(Run.class,Messages._Run_Permissions_Title());
public static final Permission DELETE = new Permission(PERMISSIONS,"Delete",Messages._Run_DeletePermission_Description(),Permission.DELETE, PermissionScope.RUN);
public static final Permission UPDATE = new Permission(PERMISSIONS,"Update",Messages._Run_UpdatePermission_Description(),Permission.UPDATE, PermissionScope.RUN);
/** See {@link hudson.Functions#isArtifactsPermissionEnabled} */
public static final Permission ARTIFACTS = new Permission(PERMISSIONS,"Artifacts",Messages._Run_ArtifactsPermission_Description(), null,
Functions.isArtifactsPermissionEnabled(), new PermissionScope[]{PermissionScope.RUN});
    /**
     * Default {@link FeedAdapter} implementation: one feed entry per build,
     * identified by job full name, build id, and the build's year.
     */
    private static class DefaultFeedAdapter implements FeedAdapter<Run> {
        public String getEntryTitle(Run entry) {
            return entry.getDisplayName()+" ("+entry.getBuildStatusSummary().message+")";
        }
        public String getEntryUrl(Run entry) {
            return entry.getUrl();
        }
        public String getEntryID(Run entry) {
            // Tag-URI style ID; includes the build's year and the job/build identity
            // so each build gets a stable, unique entry.
            return "tag:" + "hudson.dev.java.net,"
                + entry.getTimestamp().get(Calendar.YEAR) + ":"
                + entry.getParent().getFullName()+':'+entry.getId();
        }
        public String getEntryDescription(Run entry) {
            return entry.getDescription();
        }
        public Calendar getEntryTimestamp(Run entry) {
            return entry.getTimestamp();
        }
        public String getEntryAuthor(Run entry) {
            return JenkinsLocationConfiguration.get().getAdminAddress();
        }
    }
    @Override
    public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
        Object returnedResult = super.getDynamic(token, req, rsp);
        if (returnedResult == null){
            //check transient actions too
            for(Action action: getTransientActions()){
                String urlName = action.getUrlName();
                if (urlName == null) {
                    // Actions without a URL are not addressable; skip them.
                    continue;
                }
                if (urlName.equals(token)) {
                    return action;
                }
            }
            // Next/Previous Build links on an action page (like /job/Abc/123/testReport)
            // will also point to same action (/job/Abc/124/testReport), but other builds
            // may not have the action.. tell browsers to redirect up to the build page.
            returnedResult = new RedirectUp();
        }
        return returnedResult;
    }
    /**
     * Fallback target for URLs that exist on other builds but not this one:
     * serves a 404 whose body redirects browsers up to the build page.
     */
    public static class RedirectUp {
        public void doDynamic(StaplerResponse rsp) throws IOException {
            // Compromise to handle both browsers (auto-redirect) and programmatic access
            // (want accurate 404 response).. send 404 with javascript to redirect browsers.
            rsp.setStatus(HttpServletResponse.SC_NOT_FOUND);
            rsp.setContentType("text/html;charset=UTF-8");
            PrintWriter out = rsp.getWriter();
            // Both a meta refresh and a script redirect are emitted so the redirect works
            // with scripting disabled as well.
            out.println("<html><head>" +
                "<meta http-equiv='refresh' content='1;url=..'/>" +
                "<script>window.location.replace('..');</script>" +
                "</head>" +
                "<body style='background-color:white; color:white;'>" +
                "Not found</body></html>");
            out.flush();
        }
    }
}
|
use getFullDisplayName for RSS feed name
|
core/src/main/java/hudson/model/Run.java
|
use getFullDisplayName for RSS feed name
|
|
Java
|
mit
|
a85b04ff60442d0dd03f024291c914fdd3ef0238
| 0
|
ankel/Euler-weekend
|
package Ankel;
/**
*
* @author Binh Tran
* Problem 3:
* The prime factors of 13195 are 5, 7, 13 and 29.
* What is the largest prime factor of the number 600851475143 ?
*
*/
public class P3 {

    /**
     * Returns the largest prime factor of {@code n} using trial division.
     * Runs in O(sqrt(n)) in the worst case. Returns 1 for inputs &lt;= 1.
     *
     * @param n the number to factor
     * @return the largest prime factor of {@code n}, or 1 if none exists
     */
    static long largestPrimeFactor(long n) {
        long largest = 1;
        // Strip out factors of 2 first so the main loop can step over even candidates.
        while (n % 2 == 0) {
            largest = 2;
            n /= 2;
        }
        for (long i = 3; i * i <= n; i += 2) {
            // Divide out each odd factor completely; any i that divides the remaining
            // n here is necessarily prime, since all smaller factors were removed.
            while (n % i == 0) {
                largest = i;
                n /= i;
            }
        }
        // Whatever remains above 1 is itself prime and larger than any factor found so far.
        return n > 1 ? n : largest;
    }

    /**
     * @param args the command line arguments
     */
    public static void main(String[] args) {
        // Problem 3 asks for the LARGEST prime factor; the previous version printed
        // every prime factor up to sqrt(n) instead of just the answer.
        System.out.println(largestPrimeFactor(600851475143L));
    }
}
|
Euler-weekend/src/Ankel/P3.java
|
package Ankel;
/**
*
* @author Binh Tran
* Problem 3:
* The prime factors of 13195 are 5, 7, 13 and 29.
* What is the largest prime factor of the number 600851475143 ?
*
*/
public class P3 {
/**
* @param args the command line arguments
*/
public static void main(String[] args) {
PrimeList p = new PrimeList();
long number = 6 * (long)Math.pow(10, 11) + 851475143; // Integer too large, java?
long bound = (long)Math.ceil(Math.sqrt(number));
for (long i = 2; i <= bound; ++i) {
if (number % i == 0 && p.isPrime(i)) {
System.out.println(i);
}
}
}
}
|
Long initializer, aka i'm dumb
|
Euler-weekend/src/Ankel/P3.java
|
Long initializer, aka i'm dumb
|
|
Java
|
mit
|
b4ca3932f945effdde582ce7c364f2de64ac1e69
| 0
|
irudyak/statsd-jvm-profiler,etsy/statsd-jvm-profiler,jasonchaffee/statsd-jvm-profiler,jasonchaffee/statsd-jvm-profiler,irudyak/statsd-jvm-profiler,danosipov/statsd-jvm-profiler,danosipov/statsd-jvm-profiler,etsy/statsd-jvm-profiler,danosipov/statsd-jvm-profiler,etsy/statsd-jvm-profiler,etsy/statsd-jvm-profiler,etsy/statsd-jvm-profiler,irudyak/statsd-jvm-profiler,jasonchaffee/statsd-jvm-profiler,danosipov/statsd-jvm-profiler,irudyak/statsd-jvm-profiler,jasonchaffee/statsd-jvm-profiler
|
package com.etsy.statsd.profiler.profilers;
import com.etsy.statsd.profiler.Profiler;
import com.timgroup.statsd.StatsDClient;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Profiles CPU time spent in each method
*
* @author Andrew Johnson
*/
public class CPUProfiler extends Profiler {
    // Sampling period; also the increment added per observed sample (see getPeriod/getTimeUnit).
    public static final long PERIOD = 1;
    private ThreadMXBean threadMXBean;
    // Flattened stack-trace key -> accumulated sample count (in PERIOD units).
    private Map<String, Long> methodCounts;
    // Number of profile() invocations so far; used to throttle StatsD reporting.
    private int profileCount;
    /**
     * Creates the profiler.
     *
     * @param client the StatsD client used to report gauge values
     */
    public CPUProfiler(StatsDClient client) {
        super(client);
        threadMXBean = ManagementFactory.getThreadMXBean();
        methodCounts = new HashMap<>();
        profileCount = 0;
    }
    /**
     * Profile CPU time by method call
     */
    @Override
    public void profile() {
        profileCount++;
        List<ThreadInfo> threads = getAllRunnableThreads();
        for (ThreadInfo thread : threads) {
            // certain threads do not have stack traces
            if (thread.getStackTrace().length > 0) {
                String traceKey = formatStackTrace(thread.getStackTrace());
                // exclude other profilers from reporting
                if (!traceKey.contains("com-etsy-statsd-profiler")) {
                    Long count = methodCounts.get(traceKey);
                    if (count == null) {
                        methodCounts.put(traceKey, PERIOD);
                    } else {
                        methodCounts.put(traceKey, count + PERIOD);
                    }
                }
            }
        }
        // To keep from overwhelming StatsD, we only report statistics every second
        if (profileCount % 1000 == 0) {
            recordMethodCounts();
        }
    }
    /**
     * Flush methodCounts data on shutdown
     */
    @Override
    public void flushData() {
        recordMethodCounts();
    }
    @Override
    public long getPeriod() {
        return PERIOD;
    }
    @Override
    public TimeUnit getTimeUnit() {
        return TimeUnit.MILLISECONDS;
    }
    /**
     * Records method CPU time in StatsD
     */
    private void recordMethodCounts() {
        for (Map.Entry<String, Long> entry : methodCounts.entrySet()) {
            recordGaugeValue("cpu.method." + entry.getKey(), entry.getValue());
        }
    }
    /**
     * Formats a StackTraceElement as a String, excluding the line number
     *
     * @param element The StackTraceElement to format
     * @return A String representing the given StackTraceElement
     */
    private String formatStackTraceElement(StackTraceElement element) {
        // Package separators become '-' so the frame fits a dotted StatsD metric path.
        return String.format("%s-%s", element.getClassName().replace(".", "-"), element.getMethodName());
    }
    /**
     * Formats an entire stack trace as a String
     *
     * @param stack The stack trace to format
     * @return A String representing the given stack trace
     */
    private String formatStackTrace(StackTraceElement[] stack) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < stack.length; i++) {
            StackTraceElement element = stack[i];
            String formatted = formatStackTraceElement(element);
            builder.append(formatted);
            // Join frames with '.'; no trailing separator after the last frame.
            if (i != stack.length - 1) {
                builder.append(".");
            }
        }
        return builder.toString();
    }
    /**
     * Gets all runnable threads, excluding the current thread
     *
     * @return A List<ThreadInfo>
     */
    private List<ThreadInfo> getAllRunnableThreads() {
        List<ThreadInfo> threads = new ArrayList<>();
        for (ThreadInfo t : threadMXBean.dumpAllThreads(false, false)) {
            // We will sample all runnable threads that are not the current thread
            if (t.getThreadState() == Thread.State.RUNNABLE && t.getThreadId() != Thread.currentThread().getId()) {
                threads.add(t);
            }
        }
        return threads;
    }
}
|
src/main/java/com/etsy/statsd/profiler/profilers/CPUProfiler.java
|
package com.etsy.statsd.profiler.profilers;
import com.etsy.statsd.profiler.Profiler;
import com.timgroup.statsd.StatsDClient;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Profiles CPU time spent in each method
*
* @author Andrew Johnson
*/
public class CPUProfiler extends Profiler {
public static final long PERIOD = 1;
private ThreadMXBean threadMXBean;
private Map<String, Long> methodCounts;
private int profileCount;
public CPUProfiler(StatsDClient client) {
super(client);
threadMXBean = ManagementFactory.getThreadMXBean();
methodCounts = new HashMap<>();
profileCount = 0;
}
/**
* Profile CPU time by method call
*/
@Override
public void profile() {
profileCount++;
List<ThreadInfo> threads = getAllRunnableThreads();
for (ThreadInfo thread : threads) {
// certain threads do not have stack traces
if (thread.getStackTrace().length > 0) {
String traceKey = formatStackTrace(thread.getStackTrace());
// exclude other profilers from reporting
if (!traceKey.contains("com-etsy-statsd-profiler")) {
Long count = methodCounts.get(traceKey);
if (count == null) {
methodCounts.put(traceKey, PERIOD);
} else {
methodCounts.put(traceKey, count + PERIOD);
}
}
}
}
// To keep from overwhelming StatsD, we only report statistics every second
if (profileCount % 1000 == 0) {
recordMethodCounts();
}
}
/**
* Flush methodCounts data on shutdown
*/
@Override
public void flushData() {
recordMethodCounts();
}
@Override
public long getPeriod() {
return PERIOD;
}
@Override
public TimeUnit getTimeUnit() {
return TimeUnit.MILLISECONDS;
}
/**
* Records method CPU time in StatsD
*/
private void recordMethodCounts() {
for (Map.Entry<String, Long> entry : methodCounts.entrySet()) {
recordGaugeValue("cpu.method." + entry.getKey(), entry.getValue());
}
}
/**
* Formats a StackTraceElement as a String, excluding the line number
*
* @param element The StackTraceElement to format
* @return A String representing the given StackTraceElement
*/
private String formatStackTraceElement(StackTraceElement element) {
return String.format("%s:%s", element.getClassName().replace(".", "-"), element.getMethodName());
}
/**
* Formats an entire stack trace as a String
*
* @param stack The stack trace to format
* @return A String representing the given stack trace
*/
private String formatStackTrace(StackTraceElement[] stack) {
StringBuilder builder = new StringBuilder();
for (int i = 0; i < stack.length; i++) {
StackTraceElement element = stack[i];
String formatted = formatStackTraceElement(element);
builder.append(formatted);
if (i != stack.length - 1) {
builder.append(".");
}
}
return builder.toString();
}
/**
* Gets all runnable threads, excluding the current thread
*
* @return A List<ThreadInfo>
*/
private List<ThreadInfo> getAllRunnableThreads() {
List<ThreadInfo> threads = new ArrayList<>();
for (ThreadInfo t : threadMXBean.dumpAllThreads(false, false)) {
// We will sample all runnable threads that are not the current thread
if (t.getThreadState() == Thread.State.RUNNABLE && t.getThreadId() != Thread.currentThread().getId()) {
threads.add(t);
}
}
return threads;
}
}
|
Fix formatting
|
src/main/java/com/etsy/statsd/profiler/profilers/CPUProfiler.java
|
Fix formatting
|
|
Java
|
mit
|
9b18896bfb7c72107f95870f97e8fa909fbcc84c
| 0
|
EXASOL/virtual-schemas,EXASOL/virtual-schemas,EXASOL/virtual-schemas
|
package com.exasol.adapter.dialects.db2;
import com.exasol.adapter.AdapterException;
import com.exasol.adapter.dialects.SqlDialect;
import com.exasol.adapter.dialects.SqlGenerationContext;
import com.exasol.adapter.dialects.SqlGenerationHelper;
import com.exasol.adapter.dialects.SqlGenerationVisitor;
import com.exasol.adapter.jdbc.ColumnAdapterNotes;
import com.exasol.adapter.metadata.ColumnMetadata;
import com.exasol.adapter.metadata.TableMetadata;
import com.exasol.adapter.sql.*;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class DB2SqlGenerationVisitor extends SqlGenerationVisitor {
private Set<ScalarFunction> scalarFunctionsCast = new HashSet<>();
public DB2SqlGenerationVisitor(SqlDialect dialect, SqlGenerationContext context) {
super(dialect, context);
}
@Override
public String visit(SqlColumn column) throws AdapterException {
return getColumnProjectionString(column, super.visit(column));
}
private String getColumnProjectionString(SqlColumn column, String projString) throws AdapterException {
boolean isDirectlyInSelectList = (column.hasParent() && column.getParent().getType() == SqlNodeType.SELECT_LIST);
if (!isDirectlyInSelectList) {
return projString;
}
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
}
private String getColumnProjectionStringNoCheck(SqlColumn column, String projString) throws AdapterException {
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
}
private String getColumnProjectionStringNoCheckImpl(String typeName, SqlColumn column, String projString) {
switch (typeName) {
case "XML":
projString = "XMLSERIALIZE(" + projString + " as VARCHAR(32000) INCLUDING XMLDECLARATION)";
break;
//db2 does not support cast of clobs to varchar in full length -> max 32672
case "CLOB":
projString = "CAST(SUBSTRING(" + projString + ",32672) AS VARCHAR(32672))";
break;
case "CHAR () FOR BIT DATA":
case "VARCHAR () FOR BIT DATA":
projString = "HEX(" + projString + ")";
break;
case "TIME":
// cast timestamp to not lose precision
case "TIMESTAMP":
projString = "VARCHAR("+ projString + ")";
break;
default:
break;
}
if (TYPE_NAME_NOT_SUPPORTED.contains(typeName)){
projString = "'"+typeName+" NOT SUPPORTED'"; //returning a string constant for unsupported data types
}
return projString;
}
@Override
public String visit(SqlStatementSelect select) throws AdapterException {
if (!select.hasLimit()) {
return super.visit(select);
} else {
SqlLimit limit = select.getLimit();
StringBuilder sql = new StringBuilder();
sql.append("SELECT ");
sql.append(select.getSelectList().accept(this));
sql.append(" FROM ");
sql.append(select.getFromClause().accept(this));
if (select.hasFilter()) {
sql.append(" WHERE ");
sql.append(select.getWhereClause().accept(this));
}
if (select.hasGroupBy()) {
sql.append(" GROUP BY ");
sql.append(select.getGroupBy().accept(this));
}
if (select.hasHaving()) {
sql.append(" HAVING ");
sql.append(select.getHaving().accept(this));
}
if (select.hasOrderBy()) {
sql.append(" ");
sql.append(select.getOrderBy().accept(this));
}
sql.append(" FETCH FIRST " + limit.getLimit() + " ROWS ONLY");
return sql.toString();
}
}
@Override
public String visit(SqlSelectList selectList) throws AdapterException {
if (selectList.isRequestAnyColumn()) {
// The system requested any column
return "1";
}
List<String> selectListElements = new ArrayList<>();
if (selectList.isSelectStar()) {
if (SqlGenerationHelper.selectListRequiresCasts(selectList, nodeRequiresCast)) {
// Do as if the user has all columns in select list
SqlStatementSelect select = (SqlStatementSelect) selectList.getParent();
int columnId = 0;
List<TableMetadata> tableMetadata = new ArrayList<TableMetadata>();
SqlGenerationHelper.getMetadataFrom(select.getFromClause(), tableMetadata );
for (TableMetadata tableMeta : tableMetadata) {
for (ColumnMetadata columnMeta : tableMeta.getColumns()) {
SqlColumn sqlColumn = new SqlColumn(columnId, columnMeta);
selectListElements.add( getColumnProjectionStringNoCheck(sqlColumn, super.visit(sqlColumn) ) );
++columnId;
}
}
} else {
selectListElements.add("*");
}
} else {
for (SqlNode node : selectList.getExpressions()) {
selectListElements.add(node.accept(this));
}
}
return Joiner.on(", ").join(selectListElements);
}
@Override
public String visit(SqlFunctionScalar function) throws AdapterException {
String sql = super.visit(function);
switch (function.getFunction()) {
case TRIM: {
List<String> argumentsSql = new ArrayList<>();
for (SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
StringBuilder builder = new StringBuilder();
builder.append("TRIM(");
if (argumentsSql.size() > 1) {
builder.append(argumentsSql.get(1));
builder.append(" FROM ");
builder.append(argumentsSql.get(0));
} else {
builder.append(argumentsSql.get(0));
}
builder.append(")");
sql = builder.toString();
break;
}
case ADD_DAYS:
case ADD_HOURS:
case ADD_MINUTES:
case ADD_SECONDS:
case ADD_WEEKS:
case ADD_YEARS: {
List<String> argumentsSql = new ArrayList<>();
Boolean isTimestamp = false; //special cast required
for (SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
StringBuilder builder = new StringBuilder();
SqlColumn column = (SqlColumn) function.getArguments().get(0);
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
System.out.println("!DB2 : " + typeName);
if (typeName.contains("TIMESTAMP"))
{
isTimestamp = true;
System.out.println("!DB2 : we got a timestamp");
builder.append("VARCHAR(");
}
builder.append(argumentsSql.get(0));
builder.append(" + ");
builder.append(argumentsSql.get(1));
builder.append(" ");
switch (function.getFunction()) {
case ADD_DAYS:
case ADD_WEEKS:
builder.append("DAYS");
break;
case ADD_HOURS:
builder.append("HOURS");
break;
case ADD_MINUTES:
builder.append("MINUTES");
break;
case ADD_SECONDS:
builder.append("SECONDS");
break;
case ADD_YEARS:
builder.append("YEARS");
break;
default:
break;
}
if (isTimestamp)
{
builder.append(")");
}
sql = builder.toString();
break;
}
case CURRENT_DATE:
sql = "CURRENT DATE";
break;
case CURRENT_TIMESTAMP:
sql = "VARCHAR(CURRENT TIMESTAMP)";
break;
case DBTIMEZONE:
sql = "DBTIMEZONE";
break;
case LOCALTIMESTAMP:
sql = "LOCALTIMESTAMP";
break;
case SESSIONTIMEZONE:
sql = "SESSIONTIMEZONE";
break;
case SYSDATE:
sql = "CURRENT DATE";
break;
case SYSTIMESTAMP:
sql = "VARCHAR(CURRENT TIMESTAMP)";
break;
case BIT_AND:
sql = sql.replaceFirst("^BIT_AND", "BITAND");
break;
case BIT_TO_NUM:
sql = sql.replaceFirst("^BIT_TO_NUM", "BIN_TO_NUM");
break;
case NULLIFZERO: {
List<String> argumentsSql = new ArrayList<>();
for (SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
StringBuilder builder = new StringBuilder();
builder.append("NULLIF(");
builder.append(argumentsSql.get(0));
builder.append(", 0)");
sql = builder.toString();
break;
}
case ZEROIFNULL: {
List<String> argumentsSql = new ArrayList<>();
for (SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
StringBuilder builder = new StringBuilder();
builder.append("IFNULL(");
builder.append(argumentsSql.get(0));
builder.append(", 0)");
sql = builder.toString();
break;
}
case DIV: {
List<String> argumentsSql = new ArrayList<>();
for (SqlNode node : function.getArguments()) {
argumentsSql.add(node.accept(this));
}
StringBuilder builder = new StringBuilder();
builder.append("CAST(FLOOR(");
builder.append(argumentsSql.get(0));
builder.append(" / FLOOR(");
builder.append(argumentsSql.get(1));
builder.append(")) AS DECIMAL(36, 0))");
sql = builder.toString();
break;
}
default:
break;
}
boolean isDirectlyInSelectList = (function.hasParent() && function.getParent().getType() == SqlNodeType.SELECT_LIST);
if (isDirectlyInSelectList && scalarFunctionsCast.contains(function.getFunction())) {
// Cast to FLOAT because result set metadata has precision = 0, scale = 0
sql = "CAST(" + sql + " AS FLOAT)";
}
return sql;
}
@Override
public String visit(SqlFunctionAggregate function) throws AdapterException {
String sql = super.visit(function);
switch (function.getFunction()) {
case VAR_SAMP:
sql = sql.replaceFirst("^VAR_SAMP", "VARIANCE_SAMP");
break;
default:
break;
}
return sql;
}
@Override
public String visit(SqlFunctionAggregateGroupConcat function) throws AdapterException {
StringBuilder builder = new StringBuilder();
builder.append("LISTAGG");
builder.append("(");
assert(function.getArguments() != null);
assert(function.getArguments().size() == 1 && function.getArguments().get(0) != null);
String expression = function.getArguments().get(0).accept(this);
builder.append(expression);
builder.append(", ");
String separator = ",";
if (function.getSeparator() != null) {
separator = function.getSeparator();
}
builder.append("'");
builder.append(separator);
builder.append("') ");
builder.append("WITHIN GROUP(ORDER BY ");
if (function.hasOrderBy()) {
for (int i = 0; i < function.getOrderBy().getExpressions().size(); i++) {
if (i > 0) {
builder.append(", ");
}
builder.append(function.getOrderBy().getExpressions().get(i).accept(this));
if (!function.getOrderBy().isAscending().get(i)) {
builder.append(" DESC");
}
}
} else {
builder.append(expression);
}
builder.append(")");
return builder.toString();
}
private static final List<String> TYPE_NAMES_REQUIRING_CAST = ImmutableList.of("TIMESTAMP","DECFLOAT","CLOB","XML","TIME");
private static final List<String> TYPE_NAME_NOT_SUPPORTED = ImmutableList.of("BLOB");
    // Predicate used by visit(SqlSelectList) to decide whether a SELECT * must be
    // expanded into individual column projections (because some column type needs a cast).
    private java.util.function.Predicate<SqlNode> nodeRequiresCast = node -> {
        try {
            if (node.getType() == SqlNodeType.COLUMN) {
                SqlColumn column = (SqlColumn)node;
                String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
                return TYPE_NAMES_REQUIRING_CAST.contains(typeName);
            }
            return false;
        } catch (Exception e) {
            // deserialize throws a checked exception, which Predicate.test cannot
            // propagate; rethrow as unchecked so the failure still surfaces.
            throw new RuntimeException(e);
        }
    };
}
|
jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/db2/DB2SqlGenerationVisitor.java
|
package com.exasol.adapter.dialects.db2;
import com.exasol.adapter.AdapterException;
import com.exasol.adapter.dialects.SqlDialect;
import com.exasol.adapter.dialects.SqlGenerationContext;
import com.exasol.adapter.dialects.SqlGenerationHelper;
import com.exasol.adapter.dialects.SqlGenerationVisitor;
import com.exasol.adapter.jdbc.ColumnAdapterNotes;
import com.exasol.adapter.metadata.ColumnMetadata;
import com.exasol.adapter.metadata.TableMetadata;
import com.exasol.adapter.sql.*;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author Karl Griesser (fullref@gmail.com)
*/
public class DB2SqlGenerationVisitor extends SqlGenerationVisitor {
private Set<ScalarFunction> scalarFunctionsCast = new HashSet<>();
public DB2SqlGenerationVisitor(SqlDialect dialect, SqlGenerationContext context) {
super(dialect, context);
}
@Override
public String visit(SqlColumn column) throws AdapterException {
return getColumnProjectionString(column, super.visit(column));
}
private String getColumnProjectionString(SqlColumn column, String projString) throws AdapterException {
boolean isDirectlyInSelectList = (column.hasParent() && column.getParent().getType() == SqlNodeType.SELECT_LIST);
if (!isDirectlyInSelectList) {
return projString;
}
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
}
private String getColumnProjectionStringNoCheck(SqlColumn column, String projString) throws AdapterException {
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
return getColumnProjectionStringNoCheckImpl(typeName, column, projString);
}
private String getColumnProjectionStringNoCheckImpl(String typeName, SqlColumn column, String projString) {
switch (typeName) {
case "XML":
projString = "XMLSERIALIZE(" + projString + " as VARCHAR(32000) INCLUDING XMLDECLARATION)";
break;
//db2 does not support cast of clobs to varchar in full length -> max 32672
case "CLOB":
projString = "CAST(SUBSTRING(" + projString + ",32672) AS VARCHAR(32672))";
break;
case "CHAR () FOR BIT DATA":
case "VARCHAR () FOR BIT DATA":
projString = "HEX(" + projString + ")";
break;
case "TIME":
// cast timestamp to not lose precision
case "TIMESTAMP":
projString = "VARCHAR("+ projString + ")";
break;
default:
break;
}
if (TYPE_NAME_NOT_SUPPORTED.contains(typeName)){
projString = "'"+typeName+" NOT SUPPORTED'"; //returning a string constant for unsupported data types
}
return projString;
}
@Override
public String visit(SqlStatementSelect select) throws AdapterException {
if (!select.hasLimit()) {
return super.visit(select);
} else {
SqlLimit limit = select.getLimit();
StringBuilder sql = new StringBuilder();
sql.append("SELECT ");
sql.append(select.getSelectList().accept(this));
sql.append(" FROM ");
sql.append(select.getFromClause().accept(this));
if (select.hasFilter()) {
sql.append(" WHERE ");
sql.append(select.getWhereClause().accept(this));
}
if (select.hasGroupBy()) {
sql.append(" GROUP BY ");
sql.append(select.getGroupBy().accept(this));
}
if (select.hasHaving()) {
sql.append(" HAVING ");
sql.append(select.getHaving().accept(this));
}
if (select.hasOrderBy()) {
sql.append(" ");
sql.append(select.getOrderBy().accept(this));
}
sql.append(" FETCH FIRST " + limit.getLimit() + " ROWS ONLY");
return sql.toString();
}
}
@Override
public String visit(SqlSelectList selectList) throws AdapterException {
if (selectList.isRequestAnyColumn()) {
// The system requested any column
return "1";
}
List<String> selectListElements = new ArrayList<>();
if (selectList.isSelectStar()) {
if (SqlGenerationHelper.selectListRequiresCasts(selectList, nodeRequiresCast)) {
// Do as if the user has all columns in select list
SqlStatementSelect select = (SqlStatementSelect) selectList.getParent();
int columnId = 0;
List<TableMetadata> tableMetadata = new ArrayList<TableMetadata>();
SqlGenerationHelper.getMetadataFrom(select.getFromClause(), tableMetadata );
for (TableMetadata tableMeta : tableMetadata) {
for (ColumnMetadata columnMeta : tableMeta.getColumns()) {
SqlColumn sqlColumn = new SqlColumn(columnId, columnMeta);
selectListElements.add( getColumnProjectionStringNoCheck(sqlColumn, super.visit(sqlColumn) ) );
++columnId;
}
}
} else {
selectListElements.add("*");
}
} else {
for (SqlNode node : selectList.getExpressions()) {
selectListElements.add(node.accept(this));
}
}
return Joiner.on(", ").join(selectListElements);
}
/**
 * Rewrites scalar functions whose name or syntax differs between the source
 * dialect and DB2: TRIM argument order, date arithmetic via labeled
 * durations, CURRENT DATE/TIMESTAMP variants, BITAND, NULLIF/IFNULL based
 * NULLIFZERO/ZEROIFNULL, and integer DIV. Functions not listed fall through
 * with the default generation from {@code super.visit(function)}.
 */
@Override
public String visit(SqlFunctionScalar function) throws AdapterException {
    String sql = super.visit(function);
    switch (function.getFunction()) {
    case TRIM: {
        // DB2 expects "TRIM(chars FROM expr)" instead of "TRIM(expr, chars)".
        List<String> argumentsSql = new ArrayList<>();
        for (SqlNode node : function.getArguments()) {
            argumentsSql.add(node.accept(this));
        }
        StringBuilder builder = new StringBuilder();
        builder.append("TRIM(");
        if (argumentsSql.size() > 1) {
            builder.append(argumentsSql.get(1));
            builder.append(" FROM ");
            builder.append(argumentsSql.get(0));
        } else {
            builder.append(argumentsSql.get(0));
        }
        builder.append(")");
        sql = builder.toString();
        break;
    }
    case ADD_DAYS:
    case ADD_HOURS:
    case ADD_MINUTES:
    case ADD_SECONDS:
    case ADD_WEEKS:
    case ADD_YEARS: {
        // Date arithmetic is expressed with DB2 labeled durations
        // ("<expr> + <n> DAYS"). TIMESTAMP results are wrapped in VARCHAR(...)
        // the same way the other timestamp-producing branches below do.
        // NOTE(review): removed leftover System.out.println debug statements.
        List<String> argumentsSql = new ArrayList<>();
        boolean isTimestamp = false; // special cast required
        for (SqlNode node : function.getArguments()) {
            argumentsSql.add(node.accept(this));
        }
        StringBuilder builder = new StringBuilder();
        // NOTE(review): assumes the first argument is a column reference; a
        // literal here would throw ClassCastException — confirm upstream
        // guarantees this.
        SqlColumn column = (SqlColumn) function.getArguments().get(0);
        String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
        if (typeName.contains("TIMESTAMP")) {
            isTimestamp = true;
            builder.append("VARCHAR(");
        }
        builder.append(argumentsSql.get(0));
        builder.append(" + ");
        builder.append(argumentsSql.get(1));
        builder.append(" ");
        switch (function.getFunction()) {
        case ADD_DAYS:
        case ADD_WEEKS:
            // FIXME(review): ADD_WEEKS is mapped to DAYS without multiplying
            // the operand by 7, which looks like it yields a 7x-too-small
            // interval — confirm the intended semantics before changing.
            builder.append("DAYS");
            break;
        case ADD_HOURS:
            builder.append("HOURS");
            break;
        case ADD_MINUTES:
            builder.append("MINUTES");
            break;
        case ADD_SECONDS:
            builder.append("SECONDS");
            break;
        case ADD_YEARS:
            builder.append("YEARS");
            break;
        default:
            break;
        }
        if (isTimestamp) {
            builder.append(")");
        }
        sql = builder.toString();
        break;
    }
    case CURRENT_DATE:
        sql = "CURRENT DATE";
        break;
    case CURRENT_TIMESTAMP:
        sql = "VARCHAR(CURRENT TIMESTAMP)";
        break;
    case DBTIMEZONE:
        sql = "DBTIMEZONE";
        break;
    case LOCALTIMESTAMP:
        sql = "LOCALTIMESTAMP";
        break;
    case SESSIONTIMEZONE:
        sql = "SESSIONTIMEZONE";
        break;
    case SYSDATE:
        sql = "CURRENT DATE";
        break;
    case SYSTIMESTAMP:
        sql = "VARCHAR(CURRENT TIMESTAMP)";
        break;
    case BIT_AND:
        sql = sql.replaceFirst("^BIT_AND", "BITAND");
        break;
    case BIT_TO_NUM:
        sql = sql.replaceFirst("^BIT_TO_NUM", "BIN_TO_NUM");
        break;
    case NULLIFZERO: {
        // NULLIFZERO(x) -> NULLIF(x, 0)
        List<String> argumentsSql = new ArrayList<>();
        for (SqlNode node : function.getArguments()) {
            argumentsSql.add(node.accept(this));
        }
        StringBuilder builder = new StringBuilder();
        builder.append("NULLIF(");
        builder.append(argumentsSql.get(0));
        builder.append(", 0)");
        sql = builder.toString();
        break;
    }
    case ZEROIFNULL: {
        // ZEROIFNULL(x) -> IFNULL(x, 0)
        List<String> argumentsSql = new ArrayList<>();
        for (SqlNode node : function.getArguments()) {
            argumentsSql.add(node.accept(this));
        }
        StringBuilder builder = new StringBuilder();
        builder.append("IFNULL(");
        builder.append(argumentsSql.get(0));
        builder.append(", 0)");
        sql = builder.toString();
        break;
    }
    case DIV: {
        // Integer division emulated via FLOOR; the outer cast fixes the
        // result-set metadata to an exact numeric type.
        List<String> argumentsSql = new ArrayList<>();
        for (SqlNode node : function.getArguments()) {
            argumentsSql.add(node.accept(this));
        }
        StringBuilder builder = new StringBuilder();
        builder.append("CAST(FLOOR(");
        builder.append(argumentsSql.get(0));
        builder.append(" / FLOOR(");
        builder.append(argumentsSql.get(1));
        builder.append(")) AS DECIMAL(36, 0))");
        sql = builder.toString();
        break;
    }
    default:
        break;
    }
    boolean isDirectlyInSelectList = (function.hasParent() && function.getParent().getType() == SqlNodeType.SELECT_LIST);
    if (isDirectlyInSelectList && scalarFunctionsCast.contains(function.getFunction())) {
        // Cast to FLOAT because result set metadata has precision = 0, scale = 0
        sql = "CAST(" + sql + " AS FLOAT)";
    }
    return sql;
}
/**
 * Generates an aggregate function call; only the sample variance needs a
 * rename because DB2 calls it VARIANCE_SAMP instead of VAR_SAMP.
 */
@Override
public String visit(SqlFunctionAggregate function) throws AdapterException {
    String generated = super.visit(function);
    switch (function.getFunction()) {
    case VAR_SAMP:
        generated = generated.replaceFirst("^VAR_SAMP", "VARIANCE_SAMP");
        break;
    default:
        break;
    }
    return generated;
}
/**
 * Renders GROUP_CONCAT as DB2's LISTAGG:
 * LISTAGG(expr, 'sep') WITHIN GROUP(ORDER BY ...). When no explicit ORDER BY
 * is given, the aggregated expression itself is used as the sort key.
 */
@Override
public String visit(SqlFunctionAggregateGroupConcat function) throws AdapterException {
    assert(function.getArguments() != null);
    assert(function.getArguments().size() == 1 && function.getArguments().get(0) != null);
    String expression = function.getArguments().get(0).accept(this);
    String separator = (function.getSeparator() != null) ? function.getSeparator() : ",";
    StringBuilder listagg = new StringBuilder("LISTAGG(");
    listagg.append(expression);
    listagg.append(", '");
    listagg.append(separator);
    listagg.append("') ");
    listagg.append("WITHIN GROUP(ORDER BY ");
    if (!function.hasOrderBy()) {
        listagg.append(expression);
    } else {
        for (int i = 0; i < function.getOrderBy().getExpressions().size(); i++) {
            if (i > 0) {
                listagg.append(", ");
            }
            listagg.append(function.getOrderBy().getExpressions().get(i).accept(this));
            if (!function.getOrderBy().isAscending().get(i)) {
                listagg.append(" DESC");
            }
        }
    }
    listagg.append(")");
    return listagg.toString();
}
// DB2 type names whose values need an explicit cast in generated projections
// before they can be transported by the adapter.
private static final List<String> TYPE_NAMES_REQUIRING_CAST = ImmutableList.of("TIMESTAMP","DECFLOAT","CLOB","XML","TIME");
// DB2 type names that are not supported at all; projected as an error
// string constant elsewhere in this class. NOTE(review): not referenced in
// the visible portion of the file — confirm it is used by a projection helper.
private static final List<String> TYPE_NAME_NOT_SUPPORTED = ImmutableList.of("BLOB");
// Predicate handed to SqlGenerationHelper to decide whether a SELECT * must be
// expanded into explicit, cast column projections. Only COLUMN nodes can
// require a cast; the original DB2 type name is recovered from the serialized
// adapter notes attached to the column metadata.
private java.util.function.Predicate<SqlNode> nodeRequiresCast = node -> {
try {
if (node.getType() == SqlNodeType.COLUMN) {
SqlColumn column = (SqlColumn)node;
String typeName = ColumnAdapterNotes.deserialize(column.getMetadata().getAdapterNotes(), column.getMetadata().getName()).getTypeName();
return TYPE_NAMES_REQUIRING_CAST.contains(typeName);
}
return false;
} catch (Exception e) {
// deserialize declares a checked exception; rewrap so the lambda can be
// used as a java.util.function.Predicate.
throw new RuntimeException(e);
}
};
}
|
#109: Remove authors from classes
|
jdbc-adapter/virtualschema-jdbc-adapter/src/main/java/com/exasol/adapter/dialects/db2/DB2SqlGenerationVisitor.java
|
#109: Remove authors from classes
|
|
Java
|
mit
|
235d872fbe5a53b82a948473ce729d6c72a85b79
| 0
|
synchrotron-soleil-ica/continuous-materials,synchrotron-soleil-ica/continuous-materials,synchrotron-soleil-ica/continuous-materials
|
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy.pommetadata;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.shareddata.ConcurrentSharedMap;
import org.vertx.java.core.shareddata.SharedData;
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* @author Gregory Boissinot
*/
/**
 * In-memory cache for Maven POM contents and their SHA-1 checksums, backed by
 * Vert.x shared-data maps so the cache is visible across verticles.
 */
public class POMCache {

    private static final String KEY_CACHE_POM_CONTENT = "pomContent";
    private static final String KEY_CACHE_POM_SHA1 = "pomContentSha1";

    private final ConcurrentSharedMap<String, String> pomContentMap;
    private final ConcurrentSharedMap<String, String> pomSha1Map;

    public POMCache(Vertx vertx) {
        final SharedData sharedData = vertx.sharedData();
        pomContentMap = sharedData.getMap(KEY_CACHE_POM_CONTENT);
        pomSha1Map = sharedData.getMap(KEY_CACHE_POM_SHA1);
    }

    /**
     * Returns the cached SHA-1 checksum for the given ".sha1" path, or
     * {@code null} if nothing is cached under that path.
     */
    public String getSha1(String pomSha1Path) {
        return pomSha1Map.get(pomSha1Path);
    }

    /**
     * Caches the POM content under {@code pomPath} and its SHA-1 checksum
     * under {@code pomPath + ".sha1"}.
     */
    public void putPomContent(String pomPath, String pomContent) {
        pomContentMap.put(pomPath, pomContent);
        final String sha1Path = pomPath + ".sha1";
        try {
            MessageDigest crypt = MessageDigest.getInstance("SHA-1");
            crypt.reset();
            crypt.update(pomContent.getBytes("UTF-8"));
            // BUG FIX: String.valueOf(byte[]) stored the array's toString()
            // ("[B@..."), not the checksum. Encode the digest as the
            // conventional 40-character lower-case hex string instead.
            pomSha1Map.put(sha1Path, toHex(crypt.digest()));
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }

    /** Encodes bytes as fixed-width lower-case hexadecimal (40 chars for SHA-1). */
    private static String toHex(byte[] bytes) {
        StringBuilder hex = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            hex.append(Character.forDigit((b >> 4) & 0xF, 16));
            hex.append(Character.forDigit(b & 0xF, 16));
        }
        return hex.toString();
    }
}
|
Services/ServiceLegacyMavenRepoProxy/src/main/java/fr/synchrotron/soleil/ica/ci/service/legacymavenproxy/pommetadata/POMCache.java
|
package fr.synchrotron.soleil.ica.ci.service.legacymavenproxy.pommetadata;
import org.vertx.java.core.Vertx;
import org.vertx.java.core.shareddata.ConcurrentSharedMap;
import org.vertx.java.core.shareddata.SharedData;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
* @author Gregory Boissinot
*/
/**
 * In-memory cache for Maven POM contents and their SHA-1 checksums, backed by
 * Vert.x shared-data maps so the cache is visible across verticles.
 */
public class POMCache {

    private static final String KEY_CACHE_POM_CONTENT = "pomContent";
    private static final String KEY_CACHE_POM_SHA1 = "pomContentSha1";

    private final ConcurrentSharedMap<String, String> pomContentMap;
    private final ConcurrentSharedMap<String, String> pomSha1Map;

    public POMCache(Vertx vertx) {
        final SharedData sharedData = vertx.sharedData();
        pomContentMap = sharedData.getMap(KEY_CACHE_POM_CONTENT);
        pomSha1Map = sharedData.getMap(KEY_CACHE_POM_SHA1);
    }

    /**
     * Returns the cached POM content for the given path, or {@code null} if
     * nothing is cached. NOTE(review): the {@code vertx} parameter is unused;
     * kept to preserve the public signature for existing callers.
     */
    public String loadPomContentFromCache(Vertx vertx, String pomPath) {
        return pomContentMap.get(pomPath);
    }

    /**
     * Returns the cached SHA-1 checksum for the given ".sha1" path, or
     * {@code null} if nothing is cached under that path.
     */
    public String getSha1(String pomSha1Path) {
        return pomSha1Map.get(pomSha1Path);
    }

    /**
     * Caches the POM content under {@code pomPath} and its SHA-1 checksum
     * under {@code pomPath + ".sha1"}.
     */
    public void putPomContent(String pomPath, String pomContent) {
        pomContentMap.put(pomPath, pomContent);
        final String sha1Path = pomPath + ".sha1";
        try {
            MessageDigest crypt = MessageDigest.getInstance("SHA-1");
            crypt.reset();
            crypt.update(pomContent.getBytes("UTF-8"));
            // BUG FIX: BigInteger.toString(16) drops leading zero nibbles, so
            // roughly 1 in 16 checksums came out shorter than 40 characters
            // and would not match what Maven clients compute. Zero-pad to the
            // full SHA-1 width.
            String sha1 = String.format("%040x", new BigInteger(1, crypt.digest()));
            pomSha1Map.put(sha1Path, sha1);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e);
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
|
Fix sha1
|
Services/ServiceLegacyMavenRepoProxy/src/main/java/fr/synchrotron/soleil/ica/ci/service/legacymavenproxy/pommetadata/POMCache.java
|
Fix sha1
|
|
Java
|
epl-1.0
|
7353998925858d5dbcc88fdfd77f1fdcab5d2c1d
| 0
|
apribeiro/RSLingo-Studio
|
package rslingo.rslil.ui.handlers;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Iterator;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.handlers.HandlerUtil;
import rslingo.rslil.ui.windows.ImportWindow;
import rslingo.rslil.ui.windows.MenuCommand;
import rslingo.rslil.ui.windows.MenuCommandWindow;
/**
 * Eclipse command handler that imports an RSLingo Excel workbook into the
 * selected project and generates the corresponding .rslil source files under
 * "src-gen" (the workbook itself is copied to "src-gen/docs").
 *
 * Fixes applied in review: all file streams are now closed via
 * try-with-resources (previously leaked), the ".xlsx" extension is stripped
 * with a substring instead of a regex split ('.' matched any character),
 * {@link #toUpperFirst(String)} no longer throws on empty input, and the
 * duplicated file-writing code is factored into {@link #saveFile}.
 */
public class ImportExcelHandler extends AbstractHandler {

    private static final String GEN_FOLDER = "src-gen";
    private static final String DOCS_FOLDER = "docs";
    private static final String EXCEL_EXTENSION = ".xlsx";

    /**
     * Asks the user for a workbook and an import mode, then runs the
     * import/generation as a menu command on the selected project.
     * Returns {@code null} as required by the handler contract.
     */
    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException {
        IWorkbenchWindow workbenchWindow = HandlerUtil.getActiveWorkbenchWindowChecked(event);
        FileDialog dialog = new FileDialog(workbenchWindow.getShell(), SWT.OPEN);
        dialog.setFilterExtensions(new String[] { "*.xlsx" });
        dialog.setText("Select the Excel file to upload");
        final String filePath = dialog.open();
        final String fileName = dialog.getFileName();

        // dialog.open() returns null when the user cancels.
        if (filePath != null) {
            ImportWindow importWindow = new ImportWindow();
            String importMode = importWindow.open();

            if (importMode != null) {
                MenuCommand cmd = new MenuCommand() {
                    @Override
                    public void execute(IProject project, IFile file) throws Exception {
                        importExcelAndGenerateFiles(project, filePath, fileName, importMode);
                    }
                };
                MenuCommandWindow window = new MenuCommandWindow(workbenchWindow.getShell(),
                        cmd, true, null);
                window.open();
            }
        }
        return null;
    }

    /**
     * Ensures the src-gen/docs folder structure exists, copies the workbook
     * into it and generates the .rslil file(s) according to the import mode.
     */
    private void importExcelAndGenerateFiles(IProject project, String filePath,
            String fileName, String importMode) throws Exception {
        IFolder srcGenFolder = project.getFolder(GEN_FOLDER);

        if (!srcGenFolder.exists()) {
            srcGenFolder.create(true, true, new NullProgressMonitor());
        }
        IFolder docsFolder = srcGenFolder.getFolder(DOCS_FOLDER);

        if (!docsFolder.exists()) {
            docsFolder.create(true, true, new NullProgressMonitor());
        }
        importExcelFile(docsFolder, filePath, fileName);

        // Strip the file extension. The previous split(".xlsx") treated the
        // argument as a regex in which '.' matches any character; a plain
        // substring is both correct and clearer.
        if (fileName.endsWith(EXCEL_EXTENSION)) {
            fileName = fileName.substring(0, fileName.length() - EXCEL_EXTENSION.length());
        }

        if (importMode.equals(ImportWindow.SINGLE)) {
            generateSingleFile(srcGenFolder, filePath, fileName);
        } else {
            // generateMainFile(srcGenFolder, filePath, fileName);
            // generateStatementsFile(srcGenFolder, filePath, fileName);
            // generatePrivateDataFile(srcGenFolder, filePath, fileName);
            // generateServicesFile(srcGenFolder, filePath, fileName);
            // generateEnforcementsFile(srcGenFolder, filePath, fileName);
            // generateRecipientsFile(srcGenFolder, filePath, fileName);
        }
    }

    /** Copies the selected workbook into the docs folder (create or overwrite). */
    private void importExcelFile(IFolder docsFolder, String filePath, String fileName)
            throws Exception {
        IFile file = docsFolder.getFile(fileName);
        // try-with-resources: the stream was previously never closed.
        try (InputStream source = new FileInputStream(new File(filePath))) {
            if (!file.exists()) {
                file.create(source, IResource.FORCE, new NullProgressMonitor());
            } else {
                file.setContents(source, IResource.FORCE, new NullProgressMonitor());
            }
        }
    }

    /**
     * Writes the accumulated content to {@code folder/targetName}, creating
     * the file or replacing its contents.
     * NOTE(review): sb.toString().getBytes() uses the platform default
     * charset — consider UTF-8 explicitly if non-ASCII content is expected.
     */
    private void saveFile(IFolder folder, String targetName, StringBuilder sb)
            throws Exception {
        IFile file = folder.getFile(targetName);
        try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
            if (!file.exists()) {
                file.create(source, IResource.FORCE, new NullProgressMonitor());
            } else {
                file.setContents(source, IResource.FORCE, new NullProgressMonitor());
            }
        }
    }

    /** Generates one combined "<name>.rslil" file from the workbook. */
    private void generateSingleFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package-Project " + fileName + " {");
            sb.append("\n");
            sb.append("\n");
            generateProjectRegion(wb, sb);
            generateGlossaryRegion(wb, sb);
            generateStakeholdersRegion(wb, sb);
            // generateRecipientsRegion(wb, sb);
            // generateServicesRegion(wb, sb);
            // generateEnforcementsRegion(wb, sb);
        }
        // Drop the trailing newline before closing the package block.
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".rslil", sb);
    }

    /** Generates "<name>.Main.rslil" (currently only the package/import header). */
    private void generateMainFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            // Workbook is opened to validate the file even though the region
            // generator below is currently disabled.
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Main {");
            sb.append("\n");
            sb.append("\n");
            sb.append("import " + fileName + ".Statements.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Privatedata.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Recipients.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Enforcements.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Services.*");
            sb.append("\n");
            sb.append("\n");
            // generateMetadataRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Main.rslil", sb);
    }

    /** Generates "<name>.Statements.rslil" (currently only the header). */
    private void generateStatementsFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Statements {");
            sb.append("\n");
            sb.append("\n");
            sb.append("import " + fileName + ".Privatedata.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Services.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Enforcements.*");
            sb.append("\n");
            sb.append("import " + fileName + ".Recipients.*");
            sb.append("\n");
            sb.append("\n");
            // generateStatementsRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Statements.rslil", sb);
    }

    /** Generates "<name>.Privatedata.rslil" (currently only the header). */
    private void generatePrivateDataFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Privatedata {");
            sb.append("\n");
            sb.append("\n");
            // generatePrivateDataRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Privatedata.rslil", sb);
    }

    /** Generates "<name>.Services.rslil" from the "Services" sheet. */
    private void generateServicesFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Services {");
            sb.append("\n");
            sb.append("\n");
            sb.append("import " + fileName + ".Privatedata.*");
            sb.append("\n");
            sb.append("\n");
            generateServicesRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Services.rslil", sb);
    }

    /** Generates "<name>.Enforcements.rslil" from the "Enforcements" sheet. */
    private void generateEnforcementsFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Enforcements {");
            sb.append("\n");
            sb.append("\n");
            generateEnforcementsRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Enforcements.rslil", sb);
    }

    /** Generates "<name>.Recipients.rslil" from the "Recipients" sheet. */
    private void generateRecipientsFile(IFolder srcGenFolder, String filePath, String fileName)
            throws Exception {
        StringBuilder sb = new StringBuilder();
        try (InputStream inp = new FileInputStream(filePath)) {
            Workbook wb = WorkbookFactory.create(inp);
            sb.append("Package " + fileName + ".Recipients {");
            sb.append("\n");
            sb.append("\n");
            generateRecipientsRegion(wb, sb);
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("}");
        saveFile(srcGenFolder, fileName + ".Recipients.rslil", sb);
    }

    /**
     * Emits the "Project" block from the "home" sheet. The sheet layout puts
     * the data in the 8th row (7 header rows are skipped) — assumed fixed by
     * the workbook template; TODO confirm against the template.
     */
    private void generateProjectRegion(Workbook wb, StringBuilder sb) {
        // Get the Home Sheet
        Sheet sheet = wb.getSheet("home");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header rows
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();

        Row row = rowIt.next();
        Cell cellId = row.getCell(0);
        String id = formatId(cellId.getStringCellValue());
        Cell cellName = row.getCell(1);
        String name = cellName.getStringCellValue();
        Cell cellDescription = row.getCell(2);
        String description = cellDescription.getStringCellValue();

        sb.append("\tProject " + id + " {");
        sb.append("\n");
        sb.append("\t\tName \"" + name + "\"");
        sb.append("\n");
        sb.append("\t\tDescription \"" + description + "\"");
        sb.append("\n\t}");
        sb.append("\n\n");
    }

    /**
     * Emits one "GlossaryTerm" block per data row of the "glossary" sheet
     * (5 header rows skipped). Iteration stops at the first row without an
     * id cell.
     */
    private void generateGlossaryRegion(Workbook wb, StringBuilder sb) {
        // Get the Glossary Sheet
        Sheet sheet = wb.getSheet("glossary");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header rows
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();

        while (rowIt.hasNext()) {
            Row row = rowIt.next();
            Cell cellId = row.getCell(0);

            if (cellId != null) {
                String id = formatId(cellId.getStringCellValue());

                if (!id.isEmpty()) {
                    Cell cellName = row.getCell(1);
                    String name = cellName.getStringCellValue();
                    Cell cellDescription = row.getCell(2);
                    String description = cellDescription.getStringCellValue();
                    Cell cellType = row.getCell(3);
                    String type = cellType.getStringCellValue();

                    // Multi-valued types arrive as "a; b; c" — capitalize each.
                    if (type.contains(";")) {
                        String aux = "";
                        for (String s : type.split("; ")) {
                            aux += toUpperFirst(s);
                            aux += ", ";
                        }
                        type = aux.substring(0, aux.length() - 2);
                    } else {
                        type = toUpperFirst(type);
                    }
                    Cell cellAcronym = row.getCell(4);
                    String acronym = cellAcronym.getStringCellValue();
                    Cell cellPOS = row.getCell(5);
                    String pos = cellPOS.getStringCellValue();
                    pos = toUpperFirst(pos);
                    Cell cellSynset = row.getCell(6);
                    String synset = cellSynset.getStringCellValue();
                    // Term Relation Type
                    // Term Relation
                    sb.append("\tGlossaryTerm " + id + " {");
                    sb.append("\n");

                    if (!name.isEmpty()) {
                        sb.append("\t\tName \"" + name + "\"");
                        sb.append("\n");
                    }

                    if (!description.isEmpty()) {
                        sb.append("\t\tDescription \"" + description + "\"");
                        sb.append("\n");
                    }
                    sb.append("\t\tType " + type);
                    sb.append("\n");

                    if (!acronym.isEmpty()) {
                        sb.append("\t\tAcronym \"" + acronym + "\"");
                        sb.append("\n");
                    }

                    if (!pos.isEmpty()) {
                        sb.append("\t\tPOS " + pos);
                        sb.append("\n");
                    }

                    if (!synset.isEmpty()) {
                        sb.append("\t\tSynset \"" + synset + "\"");
                        // sb.append("\n");
                    }
                    // TODO: Add Term Relations
                    sb.append("\n\t}");
                    sb.append("\n\n");
                }
            }
            else
                break;
        }
    }

    /**
     * Emits one "Stakeholder" block per data row of the "stakeholders" sheet
     * (6 header rows skipped). Type is assumed to be "a.b" and category
     * "a.b[.c]" dotted paths — TODO confirm against the workbook template.
     */
    private void generateStakeholdersRegion(Workbook wb, StringBuilder sb) {
        // Get the Stakeholders Sheet
        Sheet sheet = wb.getSheet("stakeholders");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header rows
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();
        rowIt.next();

        while (rowIt.hasNext()) {
            Row row = rowIt.next();
            Cell cellId = row.getCell(0);

            if (cellId != null) {
                String id = formatId(cellId.getStringCellValue());
                Cell cellName = row.getCell(1);
                String name = cellName.getStringCellValue();
                Cell cellDescription = row.getCell(2);
                String description = cellDescription.getStringCellValue();
                Cell cellType = row.getCell(3);
                String type = cellType.getStringCellValue();
                String[] aux = type.split("\\.");
                type = toUpperFirst(aux[0]) + "." + toUpperFirst(aux[1]);
                Cell cellCategory = row.getCell(4);
                String category = cellCategory.getStringCellValue();

                if (category.contains(".")) {
                    aux = category.split("\\.");

                    if (aux.length > 2) {
                        category = toUpperFirst(aux[0]) + "." + toUpperFirst(aux[1])
                                + "." + toUpperFirst(aux[2]);
                    } else {
                        category = toUpperFirst(aux[0]) + "." + toUpperFirst(aux[1]);
                    }
                } else {
                    category = toUpperFirst(category);
                }
                Cell cellPartOf = row.getCell(5);
                String partOf = cellPartOf.getStringCellValue();

                sb.append("\tStakeholder " + id + " {");
                sb.append("\n");

                if (!name.isEmpty()) {
                    sb.append("\t\tName \"" + name + "\"");
                    sb.append("\n");
                }

                if (!description.isEmpty()) {
                    sb.append("\t\tDescription \"" + description + "\"");
                    sb.append("\n");
                }
                sb.append("\t\tType " + type);
                sb.append("\n");
                sb.append("\t\tCategory " + category);
                sb.append("\n");

                if (!partOf.isEmpty()) {
                    sb.append("\t\tPartOf " + partOf);
                    sb.append("\n");
                }
                sb.append("\t}");
                sb.append("\n\n");
            }
            else
                break;
        }
    }

    /**
     * Emits one "Service" block per data row of the "Services" sheet
     * (1 header row skipped). The private-data column may hold a single
     * numeric id, "All", or a comma-separated id list.
     */
    private void generateServicesRegion(Workbook wb, StringBuilder sb) {
        // Get the Services Sheet
        Sheet sheet = wb.getSheet("Services");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header row
        rowIt.next();

        while (rowIt.hasNext()) {
            Row row = rowIt.next();
            Cell cellId = row.getCell(0);

            if (cellId != null) {
                int id = (int) cellId.getNumericCellValue();
                Cell cellName = row.getCell(1);
                String name = cellName.getStringCellValue();
                Cell cellDescription = row.getCell(2);
                String description = cellDescription.getStringCellValue();
                Cell cellPrivateData = row.getCell(3);
                Cell cellPartOf = row.getCell(4);

                sb.append("Service S" + id + " {");
                sb.append("\n");
                sb.append("\tName \"" + name + "\"");
                sb.append("\n");
                sb.append("\tDescription \"" + description + "\"");
                sb.append("\n");

                if (cellPrivateData.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                    int privateData = (int) cellPrivateData.getNumericCellValue();
                    sb.append("\tRefersTo PrivateData PD" + privateData);
                    sb.append("\n");
                } else if (cellPrivateData.getCellType() == Cell.CELL_TYPE_STRING) {
                    String privateData = cellPrivateData.getStringCellValue();

                    if (privateData.equals("All")) {
                        sb.append("\tRefersTo PrivateData All");
                    } else {
                        sb.append("\tRefersTo PrivateData ");

                        for (String s : privateData.split(", ")) {
                            sb.append("PD" + s + ",");
                        }
                        // Delete last ','
                        sb.deleteCharAt(sb.length() - 1);
                    }
                    sb.append("\n");
                }

                if (cellPartOf.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                    int partOf = (int) cellPartOf.getNumericCellValue();
                    sb.append("\tService_Part S" + partOf);
                    sb.append("\n");
                }
                sb.append("}");
                sb.append("\n\n");
            }
            else
                break;
        }
    }

    /**
     * Emits one "Enforcement" block per data row of the "Enforcements" sheet
     * (1 header row skipped).
     */
    private void generateEnforcementsRegion(Workbook wb, StringBuilder sb) {
        // Get the Enforcements Sheet
        Sheet sheet = wb.getSheet("Enforcements");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header row
        rowIt.next();

        while (rowIt.hasNext()) {
            Row row = rowIt.next();
            Cell cellId = row.getCell(0);

            if (cellId != null) {
                int id = (int) cellId.getNumericCellValue();
                Cell cellName = row.getCell(1);
                String name = cellName.getStringCellValue();
                Cell cellDescription = row.getCell(2);
                String description = cellDescription.getStringCellValue();
                Cell cellType = row.getCell(3);
                String type = cellType.getStringCellValue();

                sb.append("Enforcement En" + id + " {");
                sb.append("\n");
                sb.append("\tName \"" + name + "\"");
                sb.append("\n");
                sb.append("\tDescription \"" + description + "\"");
                sb.append("\n");
                sb.append("\tType " + type);
                sb.append("\n}");
                sb.append("\n\n");
            }
            else
                break;
        }
    }

    /**
     * Emits one "Recipient" block per data row of the "Recipients" sheet
     * (1 header row skipped). Scope and type values containing '/' are
     * normalized to the combined "Internal/External" / "Individual/Organization"
     * forms.
     */
    private void generateRecipientsRegion(Workbook wb, StringBuilder sb) {
        // Get the Recipients Sheet
        Sheet sheet = wb.getSheet("Recipients");
        Iterator<Row> rowIt = sheet.rowIterator();
        // Ignore the Header row
        rowIt.next();

        while (rowIt.hasNext()) {
            Row row = rowIt.next();
            Cell cellId = row.getCell(0);

            if (cellId != null) {
                if (cellId.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                    int id = (int) cellId.getNumericCellValue();
                    Cell cellDescription = row.getCell(1);
                    String description = cellDescription.getStringCellValue();
                    Cell cellScope = row.getCell(2);
                    String scope = cellScope.getStringCellValue();

                    if (scope.contains("/")) {
                        scope = "Internal/External";
                    } else {
                        scope = scope.substring(0, 1).toUpperCase() + scope.substring(1);
                    }
                    Cell cellType = row.getCell(3);
                    String type = cellType.getStringCellValue();

                    if (type.contains("/")) {
                        type = "Individual/Organization";
                    } else {
                        type = type.substring(0, 1).toUpperCase() + type.substring(1);
                    }
                    Cell cellPartOf = row.getCell(4);

                    sb.append("Recipient R" + id + " {");
                    sb.append("\n");
                    // NOTE(review): Name is filled with the description value —
                    // the sheet appears to have no separate name column; confirm
                    // this is intentional.
                    sb.append("\tName \"" + description + "\"");
                    sb.append("\n");
                    sb.append("\tDescription \"" + description + "\"");
                    sb.append("\n");

                    if (cellPartOf.getCellType() == Cell.CELL_TYPE_NUMERIC) {
                        int partOf = (int) cellPartOf.getNumericCellValue();
                        sb.append("\tRecipient_Part R" + partOf);
                        sb.append("\n");
                    }
                    sb.append("\tScope " + scope);
                    sb.append("\n");
                    sb.append("\tType " + type);
                    sb.append("\n}");
                    sb.append("\n\n");
                }
            }
            else
                break;
        }
    }

    /** Makes a sheet value usable as an identifier (spaces and dashes -> underscores). */
    private String formatId(String id) {
        return id.replaceAll(" ", "_").replaceAll("-", "_");
    }

    /**
     * Upper-cases the first character. Now tolerates the empty string, which
     * previously threw StringIndexOutOfBoundsException (e.g. an empty POS cell
     * in the glossary sheet).
     */
    private String toUpperFirst(String s) {
        if (s.isEmpty()) {
            return s;
        }
        return s.substring(0, 1).toUpperCase() + s.substring(1);
    }
}
|
rslingo.rslil.ui/src/rslingo/rslil/ui/handlers/ImportExcelHandler.java
|
package rslingo.rslil.ui.handlers;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.Iterator;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.handlers.HandlerUtil;
import rslingo.rslil.ui.windows.ImportWindow;
import rslingo.rslil.ui.windows.MenuCommand;
import rslingo.rslil.ui.windows.MenuCommandWindow;
public class ImportExcelHandler extends AbstractHandler {
private static final String GEN_FOLDER = "src-gen";
private static final String DOCS_FOLDER = "docs";
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
IWorkbenchWindow workbenchWindow = HandlerUtil.getActiveWorkbenchWindowChecked(event);
FileDialog dialog = new FileDialog(workbenchWindow.getShell(), SWT.OPEN);
dialog.setFilterExtensions(new String[] { "*.xlsx" });
dialog.setText("Select the Excel file to upload");
final String filePath = dialog.open();
final String fileName = dialog.getFileName();
if (filePath != null) {
ImportWindow importWindow = new ImportWindow();
String importMode = importWindow.open();
if (importMode != null) {
MenuCommand cmd = new MenuCommand() {
@Override
public void execute(IProject project, IFile file) throws Exception {
importExcelAndGenerateFiles(project, filePath, fileName, importMode);
}
};
MenuCommandWindow window = new MenuCommandWindow(workbenchWindow.getShell(),
cmd, true, null);
window.open();
}
}
return null;
}
private void importExcelAndGenerateFiles(IProject project, String filePath,
String fileName, String importMode) throws Exception {
IFolder srcGenFolder = project.getFolder(GEN_FOLDER);
if (!srcGenFolder.exists()) {
srcGenFolder.create(true, true, new NullProgressMonitor());
}
IFolder docsFolder = srcGenFolder.getFolder(DOCS_FOLDER);
if (!docsFolder.exists()) {
docsFolder.create(true, true, new NullProgressMonitor());
}
importExcelFile(docsFolder, filePath, fileName);
// Remove file extension
if (fileName.endsWith(".xlsx")) {
fileName = fileName.split(".xlsx")[0];
}
if (importMode.equals(ImportWindow.SINGLE)) {
generateSingleFile(srcGenFolder, filePath, fileName);
} else {
// generateMainFile(srcGenFolder, filePath, fileName);
// generateStatementsFile(srcGenFolder, filePath, fileName);
// generatePrivateDataFile(srcGenFolder, filePath, fileName);
// generateServicesFile(srcGenFolder, filePath, fileName);
// generateEnforcementsFile(srcGenFolder, filePath, fileName);
// generateRecipientsFile(srcGenFolder, filePath, fileName);
}
}
private void importExcelFile(IFolder docsFolder, String filePath, String fileName)
throws Exception {
IFile file = docsFolder.getFile(fileName);
InputStream source = new FileInputStream(new File(filePath));
if (!file.exists()) {
file.create(source, IResource.FORCE, new NullProgressMonitor());
} else {
file.setContents(source, IResource.FORCE, new NullProgressMonitor());
}
}
private void generateSingleFile(IFolder srcGenFolder, String filePath, String fileName)
throws Exception {
StringBuilder sb = new StringBuilder();
InputStream inp = new FileInputStream(filePath);
Workbook wb = WorkbookFactory.create(inp);
sb.append("Package-Project " + fileName + " {");
sb.append("\n");
sb.append("\n");
generateProjectRegion(wb, sb);
generateGlossaryRegion(wb, sb);
// generatePrivateDataRegion(wb, sb);
// generateRecipientsRegion(wb, sb);
// generateServicesRegion(wb, sb);
// generateEnforcementsRegion(wb, sb);
sb.deleteCharAt(sb.length() - 1);
sb.append("}");
IFile file = srcGenFolder.getFile(fileName + ".rslil");
InputStream source = new ByteArrayInputStream(sb.toString().getBytes());
if (!file.exists()) {
file.create(source, IResource.FORCE, new NullProgressMonitor());
} else {
file.setContents(source, IResource.FORCE, new NullProgressMonitor());
}
}
/**
 * Generates {@code <fileName>.Main.rslil}: a Main package importing all
 * sibling packages. Metadata region generation is still disabled (TODO).
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generateMainFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		// Workbook is opened even though the region call is disabled, so invalid
		// spreadsheets still fail early, as in the original behavior.
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Main {");
		sb.append("\n\n");
		sb.append("import " + fileName + ".Statements.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Privatedata.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Recipients.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Enforcements.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Services.*");
		sb.append("\n\n");
		// generateMetadataRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Main.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Generates {@code <fileName>.Statements.rslil} with imports of the related
 * packages. Statements region generation is still disabled (TODO).
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generateStatementsFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Statements {");
		sb.append("\n\n");
		sb.append("import " + fileName + ".Privatedata.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Services.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Enforcements.*");
		sb.append("\n");
		sb.append("import " + fileName + ".Recipients.*");
		sb.append("\n\n");
		// generateStatementsRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Statements.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Generates {@code <fileName>.Privatedata.rslil} from the PrivateData sheet.
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generatePrivateDataFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Privatedata {");
		sb.append("\n\n");
		generatePrivateDataRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Privatedata.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Generates {@code <fileName>.Services.rslil} from the Services sheet.
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generateServicesFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Services {");
		sb.append("\n\n");
		sb.append("import " + fileName + ".Privatedata.*");
		sb.append("\n\n");
		generateServicesRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Services.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Generates {@code <fileName>.Enforcements.rslil} from the Enforcements sheet.
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generateEnforcementsFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Enforcements {");
		sb.append("\n\n");
		generateEnforcementsRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Enforcements.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Generates {@code <fileName>.Recipients.rslil} from the Recipients sheet.
 *
 * @throws Exception on workbook parse or workspace write failure
 */
private void generateRecipientsFile(IFolder srcGenFolder, String filePath, String fileName)
		throws Exception {
	StringBuilder sb = new StringBuilder();
	// try-with-resources: the original leaked this stream on every call.
	try (InputStream inp = new FileInputStream(filePath)) {
		Workbook wb = WorkbookFactory.create(inp);
		sb.append("Package " + fileName + ".Recipients {");
		sb.append("\n\n");
		generateRecipientsRegion(wb, sb);
	}
	sb.deleteCharAt(sb.length() - 1);
	sb.append("}");
	IFile file = srcGenFolder.getFile(fileName + ".Recipients.rslil");
	// NullProgressMonitor for consistency with the other generators (was null).
	try (InputStream source = new ByteArrayInputStream(sb.toString().getBytes())) {
		if (!file.exists()) {
			file.create(source, IResource.FORCE, new NullProgressMonitor());
		} else {
			file.setContents(source, IResource.FORCE, new NullProgressMonitor());
		}
	}
}
/**
 * Emits the Project region from the "home" sheet: one project row located
 * after seven header rows, with id (column 0), name (1) and description (2).
 */
private void generateProjectRegion(Workbook wb, StringBuilder sb) {
	Sheet sheet = wb.getSheet("home");
	Iterator<Row> rowIt = sheet.rowIterator();
	// Skip the seven header rows preceding the single data row.
	for (int skipped = 0; skipped < 7; skipped++) {
		rowIt.next();
	}
	Row dataRow = rowIt.next();
	String projectId = formatId(dataRow.getCell(0).getStringCellValue());
	String projectName = dataRow.getCell(1).getStringCellValue();
	String projectDescription = dataRow.getCell(2).getStringCellValue();
	sb.append("\tProject ").append(projectId).append(" {").append("\n");
	sb.append("\t\tName \"").append(projectName).append("\"").append("\n");
	sb.append("\t\tDescription \"").append(projectDescription).append("\"");
	sb.append("\n\t}").append("\n\n");
}
/**
 * Emits one GlossaryTerm block per data row of the "glossary" sheet.
 * Columns: 0 id, 1 name, 2 description, 3 type(s) ("; "-separated),
 * 4 acronym, 5 part of speech, 6 synset. Stops at the first row whose
 * id cell is missing. Term relations are not emitted yet (TODO below).
 */
private void generateGlossaryRegion(Workbook wb, StringBuilder sb) {
// Get the Glossary Sheet
Sheet sheet = wb.getSheet("glossary");
Iterator<Row> rowIt = sheet.rowIterator();
// Ignore the Header rows
rowIt.next();
rowIt.next();
rowIt.next();
rowIt.next();
rowIt.next();
while (rowIt.hasNext()) {
Row row = rowIt.next();
Cell cellId = row.getCell(0);
if (cellId != null) {
String id = formatId(cellId.getStringCellValue());
if (!id.isEmpty()) {
Cell cellName = row.getCell(1);
String name= cellName.getStringCellValue();
Cell cellDescription = row.getCell(2);
String description = cellDescription.getStringCellValue();
Cell cellType = row.getCell(3);
String type = cellType.getStringCellValue();
// Capitalize each "; "-separated type and join with ", ";
// a single type is just capitalized.
if (type.contains(";")) {
String aux = "";
for (String s : type.split("; ")) {
aux += s.substring(0, 1).toUpperCase() + s.substring(1);
aux += ", ";
}
// Trim the trailing ", " appended by the loop.
type = aux.substring(0, aux.length() - 2);
} else {
type = type.substring(0, 1).toUpperCase() + type.substring(1);
}
Cell cellAcronym = row.getCell(4);
String acronym = cellAcronym.getStringCellValue();
Cell cellPOS = row.getCell(5);
String pos = cellPOS.getStringCellValue();
pos = pos.substring(0, 1).toUpperCase() + pos.substring(1);
Cell cellSynset = row.getCell(6);
String synset = cellSynset.getStringCellValue();
// Term Relation Type
// Term Relation
sb.append("\tGlossaryTerm " + id + " {");
sb.append("\n");
// Optional attributes are only emitted when non-empty.
if (!name.isEmpty()) {
sb.append("\t\tName \"" + name + "\"");
sb.append("\n");
}
if (!description.isEmpty()) {
sb.append("\t\tDescription \"" + description + "\"");
sb.append("\n");
}
sb.append("\t\tType " + type);
sb.append("\n");
if (!acronym.isEmpty()) {
sb.append("\t\tAcronym \"" + acronym + "\"");
sb.append("\n");
}
if (!pos.isEmpty()) {
sb.append("\t\tPOS " + pos);
sb.append("\n");
}
if (!synset.isEmpty()) {
sb.append("\t\tSynset \"" + synset + "\"");
// sb.append("\n");
}
// TODO: Add Term Relations
sb.append("\n\t}");
sb.append("\n\n");
}
}
else
break;
}
}
/**
 * Emits one PrivateData block per data row of the "PrivateData" sheet.
 * Columns: 0 numeric id, 1 type, 2 description, 3 attributes
 * (",\n"-separated). Stops at the first row with no id cell.
 */
private void generatePrivateDataRegion(Workbook wb, StringBuilder sb) {
// Get the Private Data Sheet
Sheet sheet = wb.getSheet("PrivateData");
Iterator<Row> rowIt = sheet.rowIterator();
// Ignore the Header row
rowIt.next();
while (rowIt.hasNext()) {
Row row = rowIt.next();
Cell cellId = row.getCell(0);
if (cellId != null) {
int id = (int) cellId.getNumericCellValue();
// Strip spaces so the type is a single RSLIL token.
Cell cellType = row.getCell(1);
String type = cellType.getStringCellValue().replaceAll(" ", "");
Cell cellDescription = row.getCell(2);
String description = cellDescription.getStringCellValue();
Cell cellAttributes = row.getCell(3);
String attributes = cellAttributes.getStringCellValue();
sb.append("PrivateData PD" + id + " {");
sb.append("\n");
sb.append("\tDescription \"" + description + "\"");
sb.append("\n");
sb.append("\tType " + type);
sb.append("\n");
// One Attribute block per ",\n"-separated entry, capitalized.
for (String a : attributes.split(",\n")) {
a = a.substring(0, 1).toUpperCase() + a.substring(1);
sb.append("\tAttribute \"" + a + "\" {");
sb.append("\n");
sb.append("\t\tDescription \"" + a + "\"");
sb.append("\n\t}");
sb.append("\n");
}
// Delete last '\n'
// sb.deleteCharAt(sb.length() - 2);
sb.deleteCharAt(sb.length() - 1);
sb.append("\n}");
sb.append("\n\n");
}
else
break;
}
}
/**
 * Emits one Service block per data row of the "Services" sheet.
 * Columns: 0 numeric id, 1 name, 2 description, 3 referenced private data
 * (a number, "All", or a ", "-separated id list), 4 parent service id.
 * Stops at the first row with no id cell.
 * NOTE(review): Cell.CELL_TYPE_* int constants are deprecated in newer
 * Apache POI versions — confirm the POI version pinned by this project.
 */
private void generateServicesRegion(Workbook wb, StringBuilder sb) {
// Get the Services Sheet
Sheet sheet = wb.getSheet("Services");
Iterator<Row> rowIt = sheet.rowIterator();
// Ignore the Header row
rowIt.next();
while (rowIt.hasNext()) {
Row row = rowIt.next();
Cell cellId = row.getCell(0);
if (cellId != null) {
int id = (int) cellId.getNumericCellValue();
Cell cellName = row.getCell(1);
String name = cellName.getStringCellValue();
Cell cellDescription = row.getCell(2);
String description = cellDescription.getStringCellValue();
Cell cellPrivateData = row.getCell(3);
Cell cellPartOf = row.getCell(4);
sb.append("Service S" + id + " {");
sb.append("\n");
sb.append("\tName \"" + name + "\"");
sb.append("\n");
sb.append("\tDescription \"" + description + "\"");
sb.append("\n");
// Private-data reference: single numeric id, "All", or a list of ids.
if (cellPrivateData.getCellType() == Cell.CELL_TYPE_NUMERIC) {
int privateData = (int) cellPrivateData.getNumericCellValue();
sb.append("\tRefersTo PrivateData PD" + privateData);
sb.append("\n");
} else if (cellPrivateData.getCellType() == Cell.CELL_TYPE_STRING) {
String privateData = cellPrivateData.getStringCellValue();
if (privateData.equals("All")) {
sb.append("\tRefersTo PrivateData All");
} else {
sb.append("\tRefersTo PrivateData ");
for (String s : privateData.split(", ")) {
sb.append("PD" + s + ",");
}
// Delete last ','
sb.deleteCharAt(sb.length() - 1);
}
sb.append("\n");
}
// Optional parent service reference.
if (cellPartOf.getCellType() == Cell.CELL_TYPE_NUMERIC) {
int partOf = (int) cellPartOf.getNumericCellValue();
sb.append("\tService_Part S" + partOf);
sb.append("\n");
}
sb.append("}");
sb.append("\n\n");
}
else
break;
}
}
/**
 * Emits one Enforcement block per data row of the "Enforcements" sheet.
 * Columns: 0 numeric id, 1 name, 2 description, 3 type.
 * Stops at the first row whose id cell is absent.
 */
private void generateEnforcementsRegion(Workbook wb, StringBuilder sb) {
	Sheet enforcementsSheet = wb.getSheet("Enforcements");
	Iterator<Row> rows = enforcementsSheet.rowIterator();
	// Skip the single header row.
	rows.next();
	while (rows.hasNext()) {
		Row current = rows.next();
		Cell idCell = current.getCell(0);
		if (idCell == null) {
			// A missing id marks the end of the data region.
			break;
		}
		int enforcementId = (int) idCell.getNumericCellValue();
		String enforcementName = current.getCell(1).getStringCellValue();
		String enforcementDescription = current.getCell(2).getStringCellValue();
		String enforcementType = current.getCell(3).getStringCellValue();
		sb.append("Enforcement En").append(enforcementId).append(" {").append("\n");
		sb.append("\tName \"").append(enforcementName).append("\"").append("\n");
		sb.append("\tDescription \"").append(enforcementDescription).append("\"").append("\n");
		sb.append("\tType ").append(enforcementType);
		sb.append("\n}").append("\n\n");
	}
}
/**
 * Emits one Recipient block per data row of the "Recipients" sheet.
 * Columns: 0 numeric id, 1 description, 2 scope, 3 type, 4 parent id.
 * Rows with a non-numeric id cell are silently skipped; a missing id
 * cell terminates the loop.
 * NOTE(review): Name is emitted from the description column — there is
 * no separate name column visible here; confirm this is intended.
 */
private void generateRecipientsRegion(Workbook wb, StringBuilder sb) {
// Get the Recipients Sheet
Sheet sheet = wb.getSheet("Recipients");
Iterator<Row> rowIt = sheet.rowIterator();
// Ignore the Header row
rowIt.next();
while (rowIt.hasNext()) {
Row row = rowIt.next();
Cell cellId = row.getCell(0);
if (cellId != null) {
if (cellId.getCellType() == Cell.CELL_TYPE_NUMERIC) {
int id = (int) cellId.getNumericCellValue();
Cell cellDescription = row.getCell(1);
String description = cellDescription.getStringCellValue();
Cell cellScope = row.getCell(2);
String scope = cellScope.getStringCellValue();
// A '/' in the cell means both scopes apply; otherwise capitalize.
if (scope.contains("/")) {
scope = "Internal/External";
} else {
scope = scope.substring(0, 1).toUpperCase() + scope.substring(1);
}
Cell cellType = row.getCell(3);
String type = cellType.getStringCellValue();
if (type.contains("/")) {
type = "Individual/Organization";
} else {
type = type.substring(0, 1).toUpperCase() + type.substring(1);
}
Cell cellPartOf = row.getCell(4);
sb.append("Recipient R" + id + " {");
sb.append("\n");
sb.append("\tName \"" + description + "\"");
sb.append("\n");
sb.append("\tDescription \"" + description + "\"");
sb.append("\n");
// Optional parent recipient reference.
if (cellPartOf.getCellType() == Cell.CELL_TYPE_NUMERIC) {
int partOf = (int) cellPartOf.getNumericCellValue();
sb.append("\tRecipient_Part R" + partOf);
sb.append("\n");
}
sb.append("\tScope " + scope);
sb.append("\n");
sb.append("\tType " + type);
sb.append("\n}");
sb.append("\n\n");
}
}
else
break;
}
}
/**
 * Normalizes a spreadsheet identifier into an RSLIL-safe token:
 * spaces and hyphens become underscores.
 * Uses char-based {@link String#replace(char, char)} instead of the
 * original regex {@code replaceAll} — no regex semantics are needed.
 */
private String formatId(String id) {
	return id.replace(' ', '_').replace('-', '_');
}
}
|
Added Stakeholders region gen
|
rslingo.rslil.ui/src/rslingo/rslil/ui/handlers/ImportExcelHandler.java
|
Added Stakeholders region gen
|
|
Java
|
lgpl-2.1
|
6b0ab287960e20afa524dca389c816d4a9d5f61a
| 0
|
ekiwi/jade-mirror,ekiwi/jade-mirror,ekiwi/jade-mirror,ekiwi/jade-mirror
|
/*
* $Log$
* Revision 1.7 1998/11/03 00:31:26 rimassa
* A cosmetic change in a comment.
*
* Revision 1.6 1998/10/31 16:34:51 rimassa
* Removed useless inheritance from AWTEvent class.
*
* Revision 1.5 1998/10/04 18:01:04 rimassa
* Added a 'Log:' field to every source file.
*
*/
package jade.core;
import jade.lang.acl.*;
/***************************************************************
Name: CommEvent
Responsibilities and Collaborations:
+ Objectifies the reception event, embedding the received ACL
message.
(ACLMessage)
+ Holds a list of recipient agents to allow transparent message
multicasting
(AgentGroup)
******************************************************************/
public class CommEvent {
// Defensive copy of the received ACL message (see constructor comment).
private ACLMessage message;
// Recipient group for multicast delivery; null for a unicast event.
private AgentGroup recipients;
/**
Builds a unicast communication event carrying a copy of the given message.
*/
public CommEvent(CommBroadcaster source, ACLMessage message) {
// Message cloning is necessary for intra-VM messaging, since no
// message serialization is carried out in that case
this.message = (ACLMessage)message.clone();
recipients = null;
}
/**
Builds a multicast communication event addressed to the given agent group.
*/
public CommEvent(CommBroadcaster source, ACLMessage message, AgentGroup group) {
this(source, message);
recipients = group;
}
/** Returns the (cloned) ACL message carried by this event. */
public ACLMessage getMessage() {
return message;
}
/** True when this event targets an agent group rather than a single agent. */
public boolean isMulticast() {
return recipients != null;
}
/** Returns the recipient group, or null for a unicast event. */
public AgentGroup getRecipients() {
return recipients;
}
}
|
src/jade/core/CommEvent.java
|
/*
* $Log$
* Revision 1.6 1998/10/31 16:34:51 rimassa
* Removed useless inheritance from AWTEvent class.
*
* Revision 1.5 1998/10/04 18:01:04 rimassa
* Added a 'Log:' field to every source file.
*
*/
package jade.core;
import jade.lang.acl.*;
/***************************************************************
Name: CommEvent
Responsibilities and Collaborations:
+ Objectifies the reception event, embedding the received ACL
message.
(ACLMessage)
+ Holds a list of recipients agent to allow trasparent message
multicasting
(AgentGroup)
******************************************************************/
public class CommEvent {
private ACLMessage message;
private AgentGroup recipients;
public CommEvent(CommBroadcaster source, ACLMessage message) {
// Message cloning is Necessary for intra-VM messaging, since no
// message serialization is carried out in that case
this.message = (ACLMessage)message.clone();
recipients = null;
}
public CommEvent(CommBroadcaster source, ACLMessage message, AgentGroup group) {
this(source, message);
recipients = group;
}
public ACLMessage getMessage() {
return message;
}
public boolean isMulticast() {
return recipients != null;
}
public AgentGroup getRecipients() {
return recipients;
}
}
|
A cosmetic change in a comment.
|
src/jade/core/CommEvent.java
|
A cosmetic change in a comment.
|
|
Java
|
lgpl-2.1
|
e1a6c03f2aab653f35a6a2f0348819c11e4ceb9d
| 0
|
xph906/SootNew,mbenz89/soot,cfallin/soot,mbenz89/soot,plast-lab/soot,anddann/soot,anddann/soot,xph906/SootNew,cfallin/soot,xph906/SootNew,plast-lab/soot,anddann/soot,cfallin/soot,cfallin/soot,mbenz89/soot,mbenz89/soot,plast-lab/soot,anddann/soot,xph906/SootNew
|
/* Soot - a J*va Optimization Framework
* Copyright (C) 1997-1999 Raja Vallee-Rai
* Copyright (C) 2004 Ondrej Lhotak
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* Modified by the Sable Research Group and others 1997-1999.
* See the 'credits' file distributed with Soot for the complete list of
* contributors. (Soot is distributed at http://www.sable.mcgill.ca/soot)
*/
package soot;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.ContextSensitiveCallGraph;
import soot.jimple.toolkits.callgraph.ReachableMethods;
import soot.jimple.toolkits.pointer.DumbPointerAnalysis;
import soot.jimple.toolkits.pointer.SideEffectAnalysis;
import soot.options.CGOptions;
import soot.options.Options;
import soot.toolkits.exceptions.PedanticThrowAnalysis;
import soot.toolkits.exceptions.ThrowAnalysis;
import soot.toolkits.exceptions.UnitThrowAnalysis;
import soot.util.ArrayNumberer;
import soot.util.Chain;
import soot.util.HashChain;
import soot.util.MapNumberer;
import soot.util.Numberer;
import soot.util.SingletonList;
import soot.util.StringNumberer;
/** Manages the SootClasses of the application being analyzed. */
public class Scene //extends AbstractHost
{
/**
 * Builds the singleton Scene: seeds reserved names, picks up the
 * soot.class.path system property, pre-registers all call-edge Kinds
 * in the kind numberer, and queues the basic classes for resolution.
 */
public Scene ( Singletons.Global g )
{
setReservedNames();
// load soot.class.path system property, if defined
String scp = System.getProperty("soot.class.path");
if (scp != null)
setSootClassPath(scp);
// Register every call-edge kind so kind numbers are stable from startup.
kindNumberer.add( Kind.INVALID );
kindNumberer.add( Kind.STATIC );
kindNumberer.add( Kind.VIRTUAL );
kindNumberer.add( Kind.INTERFACE );
kindNumberer.add( Kind.SPECIAL );
kindNumberer.add( Kind.CLINIT );
kindNumberer.add( Kind.THREAD );
kindNumberer.add( Kind.FINALIZE );
kindNumberer.add( Kind.INVOKE_FINALIZE );
kindNumberer.add( Kind.PRIVILEGED );
kindNumberer.add( Kind.NEWINSTANCE );
addSootBasicClasses();
}
/** Returns the per-G singleton Scene instance. */
public static Scene v() { return G.v().soot_Scene (); }
// All managed classes; the three chains below partition a subset of it by role.
Chain<SootClass> classes = new HashChain<SootClass>();
Chain<SootClass> applicationClasses = new HashChain<SootClass>();
Chain<SootClass> libraryClasses = new HashChain<SootClass>();
Chain<SootClass> phantomClasses = new HashChain<SootClass>();
// Maps a class name to its (Ref)Type; see getRefType/addRefType.
private final Map<String,Type> nameToClass = new HashMap<String,Type>();
// Numberers assign dense integer ids to the various Soot entities.
ArrayNumberer kindNumberer = new ArrayNumberer();
ArrayNumberer typeNumberer = new ArrayNumberer();
ArrayNumberer methodNumberer = new ArrayNumberer();
Numberer unitNumberer = new MapNumberer();
Numberer contextNumberer = null;
ArrayNumberer fieldNumberer = new ArrayNumberer();
ArrayNumberer classNumberer = new ArrayNumberer();
StringNumberer subSigNumberer = new StringNumberer();
ArrayNumberer localNumberer = new ArrayNumberer();
// Cached analyses; all invalidated together by modifyHierarchy().
private Hierarchy activeHierarchy;
private FastHierarchy activeFastHierarchy;
private CallGraph activeCallGraph;
private ReachableMethods reachableMethods;
private PointsToAnalysis activePointsToAnalysis;
private SideEffectAnalysis activeSideEffectAnalysis;
private List<SootMethod> entryPoints;
boolean allowsPhantomRefs = false;
SootClass mainClass;
// Lazily computed in getSootClassPath(); null until first use.
String sootClassPath = null;
// Two default values for constructing ExceptionalUnitGraphs:
private ThrowAnalysis defaultThrowAnalysis = null;
/**
 * Sets the main class of the analyzed application.
 * Validates BEFORE assigning (the original assigned first), so a failed
 * call leaves any previously set main class intact.
 *
 * @throws RuntimeException if {@code m} declares no
 *         {@code void main(java.lang.String[])} method
 */
public void setMainClass(SootClass m)
{
if(!m.declaresMethod(getSubSigNumberer().findOrAdd( "void main(java.lang.String[])" ))) {
throw new RuntimeException("Main-class has no main method!");
}
mainClass = m;
}
// Tokens that must be quoted in output; populated by setReservedNames().
Set<String> reservedNames = new HashSet<String>();
/**
Returns a set of tokens which are reserved. Any field, class, method, or local variable with such a name will be quoted.
*/
public Set<String> getReservedNames()
{
return reservedNames;
}
/**
If this name is in the set of reserved names, then return a quoted version of it. Else pass it through.
*/
public String quotedNameOf(String s)
{
if(reservedNames.contains(s))
return "\'" + s + "\'";
else
return s;
}
/**
 * Returns the main class, resolving it from the command-line options on
 * first access.
 * @throws RuntimeException if no main class can be determined
 */
public SootClass getMainClass()
{
if(mainClass == null) {
setMainClassFromOptions();
}
if(mainClass == null)
throw new RuntimeException("There is no main class set!");
return mainClass;
}
/**
 * Returns the {@code void main(String[])} method of the main class.
 * @throws RuntimeException if no main class is set or it has no main method
 */
public SootMethod getMainMethod() {
if(mainClass==null) {
throw new RuntimeException("There is no main class set!");
}
if (!mainClass.declaresMethod ("main", new SingletonList( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v())) {
throw new RuntimeException("Main class declares no main method!");
}
return mainClass.getMethod ("main", new SingletonList( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v());
}
/** Sets the classpath and invalidates the SourceLocator's cached view of it. */
public void setSootClassPath(String p)
{
sootClassPath = p;
SourceLocator.v().invalidateClassPath();
}
/**
 * Returns the effective Soot classpath, computing it lazily from the
 * -soot-classpath option and/or defaultClassPath() on first call.
 */
public String getSootClassPath()
{
if( sootClassPath == null ) {
String optionscp = Options.v().soot_classpath();
if( optionscp.length() > 0 )
sootClassPath = optionscp;
String defaultSootClassPath = defaultClassPath();
//if no classpath is given on the command line, take the default
if( sootClassPath == null ) {
sootClassPath = defaultSootClassPath;
} else {
//if one is given...
if(Options.v().prepend_classpath()) {
//if the prepend flag is set, append the default classpath
sootClassPath += File.pathSeparator + defaultSootClassPath;
}
//else, leave it as it is
}
}
return sootClassPath;
}
/**
 * Computes the default Soot classpath: the JVM classpath plus the Java
 * runtime jar(s), plus jce.jar in whole-program/Dava mode.
 *
 * Fixes from review: the original appended {@code File.pathSeparator}
 * both after the JVM classpath AND at the start of each branch, yielding
 * an empty classpath entry (which tools may resolve to the current
 * directory); it also used a needlessly synchronized StringBuffer.
 */
public String defaultClassPath() {
StringBuilder sb = new StringBuilder();
sb.append(System.getProperty("java.class.path"));
if(System.getProperty("os.name").equals("Mac OS X")) {
//in Mac OS X, rt.jar is split into classes.jar and ui.jar
String classesDir = System.getProperty("java.home") + File.separator + ".."
+ File.separator + "Classes" + File.separator;
sb.append(File.pathSeparator);
sb.append(classesDir).append("classes.jar");
sb.append(File.pathSeparator);
sb.append(classesDir).append("ui.jar");
} else {
sb.append(File.pathSeparator);
sb.append(System.getProperty("java.home"));
sb.append(File.separator).append("lib").append(File.separator).append("rt.jar");
}
if(Options.v().whole_program() || Options.v().output_format()==Options.output_format_dava) {
//add jce.jar, which is necessary for whole program mode
//(java.security.Signature from rt.jar references javax.crypto.Cipher from jce.jar)
sb.append(File.pathSeparator+
System.getProperty("java.home")+File.separator+"lib"+File.separator+"jce.jar");
}
return sb.toString();
}
// Monotonic counter bumped whenever the class hierarchy changes.
private int stateCount;
/** Returns the current hierarchy-modification counter (cache-invalidation token). */
public int getState() { return this.stateCount; }
// Invalidates every analysis cached against the previous class hierarchy.
private void modifyHierarchy() {
stateCount++;
activeHierarchy = null;
activeFastHierarchy = null;
activeSideEffectAnalysis = null;
activePointsToAnalysis = null;
}
/**
 * Adds a class to the scene as a library class, registering its RefType
 * and invalidating cached hierarchy-derived analyses.
 * @throws RuntimeException if the class is already managed or its name duplicates one
 */
public void addClass(SootClass c)
{
if(c.isInScene())
throw new RuntimeException("already managed: "+c.getName());
if(containsClass(c.getName()))
throw new RuntimeException("duplicate class: "+c.getName());
classes.add(c);
c.setLibraryClass();
nameToClass.put(c.getName(), c.getType());
c.getType().setSootClass(c);
c.setInScene(true);
modifyHierarchy();
}
/**
 * Removes a class from the scene and from its role chain.
 * NOTE(review): the nameToClass entry is NOT removed here, so the stale
 * RefType stays mapped (containsClass still returns false because
 * isInScene() becomes false) — confirm this asymmetry is intended.
 */
public void removeClass(SootClass c)
{
if(!c.isInScene())
throw new RuntimeException();
classes.remove(c);
if(c.isLibraryClass()) {
libraryClasses.remove(c);
} else if(c.isPhantomClass()) {
phantomClasses.remove(c);
} else if(c.isApplicationClass()) {
applicationClasses.remove(c);
}
c.getType().setSootClass(null);
c.setInScene(false);
modifyHierarchy();
}
/**
 * True iff a class with the given name is registered, resolved, and
 * currently part of this scene.
 */
public boolean containsClass(String className)
{
// name -> type -> class; any missing link means "not present".
RefType type = (RefType) nameToClass.get(className);
return type != null
&& type.hasSootClass()
&& type.getSootClass().isInScene();
}
/** True iff a (Ref)Type is registered under the given class name. */
public boolean containsType(String className)
{
return nameToClass.containsKey(className);
}
/**
 * Extracts the declaring class name from a Soot signature of the form
 * {@code <ClassName: subsignature>}.
 * Review: replaced the original "oops" messages with descriptive ones and
 * guarded the empty-string case (previously a raw StringIndexOutOfBounds).
 *
 * @throws RuntimeException if the signature is malformed
 */
public String signatureToClass(String sig) {
if( sig.isEmpty() || sig.charAt(0) != '<' ) throw new RuntimeException("Malformed signature (missing leading '<'): "+sig);
if( sig.charAt(sig.length()-1) != '>' ) throw new RuntimeException("Malformed signature (missing trailing '>'): "+sig);
int index = sig.indexOf( ":" );
if( index < 0 ) throw new RuntimeException("Malformed signature (missing ':'): "+sig);
return sig.substring(1,index);
}
/**
 * Extracts the subsignature (the part after {@code ": "}) from a Soot
 * signature of the form {@code <ClassName: subsignature>}.
 * Review: replaced the original "oops" messages with descriptive ones and
 * guarded the empty-string case (previously a raw StringIndexOutOfBounds).
 *
 * @throws RuntimeException if the signature is malformed
 */
public String signatureToSubsignature(String sig) {
if( sig.isEmpty() || sig.charAt(0) != '<' ) throw new RuntimeException("Malformed signature (missing leading '<'): "+sig);
if( sig.charAt(sig.length()-1) != '>' ) throw new RuntimeException("Malformed signature (missing trailing '>'): "+sig);
int index = sig.indexOf( ":" );
if( index < 0 ) throw new RuntimeException("Malformed signature (missing ':'): "+sig);
return sig.substring(index+2,sig.length()-1);
}
// Resolves a full field signature to its SootField, or null if the class
// or field is not present.
private SootField grabField(String fieldSignature)
{
String cname = signatureToClass( fieldSignature );
String fname = signatureToSubsignature( fieldSignature );
if( !containsClass(cname) ) return null;
SootClass c = getSootClass(cname);
if( !c.declaresField( fname ) ) return null;
return c.getField( fname );
}
/** True iff the field denoted by the full signature exists in the scene. */
public boolean containsField(String fieldSignature)
{
return grabField(fieldSignature) != null;
}
// Resolves a full method signature to its SootMethod, or null if the class
// or method is not present.
private SootMethod grabMethod(String methodSignature)
{
String cname = signatureToClass( methodSignature );
String mname = signatureToSubsignature( methodSignature );
if( !containsClass(cname) ) return null;
SootClass c = getSootClass(cname);
if( !c.declaresMethod( mname ) ) return null;
return c.getMethod( mname );
}
/** True iff the method denoted by the full signature exists in the scene. */
public boolean containsMethod(String methodSignature)
{
return grabMethod(methodSignature) != null;
}
/**
 * Returns the field denoted by the full signature.
 * @throws RuntimeException if the field does not exist in the scene
 */
public SootField getField(String fieldSignature)
{
SootField f = grabField( fieldSignature );
if (f != null)
return f;
throw new RuntimeException("tried to get nonexistent field "+fieldSignature);
}
/**
 * Returns the method denoted by the full signature.
 * @throws RuntimeException if the method does not exist in the scene
 */
public SootMethod getMethod(String methodSignature)
{
SootMethod m = grabMethod( methodSignature );
if (m != null)
return m;
throw new RuntimeException("tried to get nonexistent method "+methodSignature);
}
/**
* Attempts to load the given class and all of the required support classes.
* Returns the original class if it was loaded, or null otherwise.
*/
public SootClass tryLoadClass(String className, int desiredLevel)
{
/*
if(Options.v().time())
Main.v().resolveTimer.start();
*/
// Phantom refs are temporarily enabled around resolution, then restored
// to false (matching loadClass below).
setPhantomRefs(true);
//SootResolver resolver = new SootResolver();
// Unlike loadClass, bail out early when the class source is genuinely
// absent and phantom refs are globally off.
if( !getPhantomRefs()
&& SourceLocator.v().getClassSource(className) == null ) {
setPhantomRefs(false);
return null;
}
SootResolver resolver = SootResolver.v();
SootClass toReturn = resolver.resolveClass(className, desiredLevel);
setPhantomRefs(false);
return toReturn;
/*
if(Options.v().time())
Main.v().resolveTimer.end(); */
}
/**
* Loads the given class and all of the required support classes. Returns the first class.
*/
public SootClass loadClassAndSupport(String className)
{
// Resolve signatures first; only non-phantom classes get full bodies.
SootClass ret = loadClass(className, SootClass.SIGNATURES);
if( !ret.isPhantom() ) ret = loadClass(className, SootClass.BODIES);
return ret;
}
/**
 * Resolves the given class to the desired resolving level.
 * Phantom refs are enabled for the duration of the resolution.
 */
public SootClass loadClass(String className, int desiredLevel)
{
/*
if(Options.v().time())
Main.v().resolveTimer.start();
*/
setPhantomRefs(true);
//SootResolver resolver = new SootResolver();
SootResolver resolver = SootResolver.v();
SootClass toReturn = resolver.resolveClass(className, desiredLevel);
setPhantomRefs(false);
return toReturn;
/*
if(Options.v().time())
Main.v().resolveTimer.end(); */
}
/**
* Returns the RefType with the given className.
* @throws IllegalStateException if the RefType for this class cannot be found.
* Use {@link #containsType(String)} to check if type is registered
*/
public RefType getRefType(String className)
{
RefType refType = (RefType) nameToClass.get(className);
if(refType==null) {
throw new IllegalStateException("RefType "+className+" not loaded. " +
"If you tried to get the RefType of a library class, did you call loadNecessaryClasses()? " +
"Otherwise please check Soot's classpath.");
}
return refType;
}
/**
* Returns the {@link RefType} for {@link Object}.
*/
public RefType getObjectType() {
return getRefType("java.lang.Object");
}
/**
* Registers the given RefType under its class name.
*/
public void addRefType(RefType type)
{
nameToClass.put(type.getClassName(), type);
}
/**
* Returns the SootClass with the given className.
* If the class is unknown and phantom refs are allowed, a new phantom
* class is created, added to the scene, and returned; otherwise a
* RuntimeException aborts the lookup.
*/
public SootClass getSootClass(String className) {
RefType type = (RefType) nameToClass.get(className);
SootClass toReturn = null;
if (type != null)
toReturn = type.getSootClass();
if (toReturn != null) {
return toReturn;
} else if (allowsPhantomRefs()) {
// Unknown class: fabricate a phantom placeholder.
SootClass c = new SootClass(className);
c.setPhantom(true);
addClass(c);
return c;
} else {
throw new RuntimeException(System.getProperty("line.separator")
+ "Aborting: can't find classfile " + className);
}
}
/**
* Returns a backed chain of the classes in this manager.
*/
public Chain<SootClass> getClasses()
{
return classes;
}
/* The four following chains are mutually disjoint. */
/**
* Returns a chain of the application classes in this scene.
* These classes are the ones which can be freely analysed & modified.
*/
public Chain<SootClass> getApplicationClasses()
{
return applicationClasses;
}
/**
* Returns a chain of the library classes in this scene.
* These classes can be analysed but not modified.
*/
public Chain<SootClass> getLibraryClasses()
{
return libraryClasses;
}
/**
* Returns a chain of the phantom classes in this scene.
* These classes are referred to by other classes, but cannot be loaded.
*/
public Chain<SootClass> getPhantomClasses()
{
return phantomClasses;
}
/**
 * Maps a class to the (mutually disjoint) role chain that currently holds
 * it, or null if it belongs to none of the three.
 */
Chain<SootClass> getContainingChain(SootClass c)
{
if (c.isApplicationClass()) {
return getApplicationClasses();
}
if (c.isLibraryClass()) {
return getLibraryClasses();
}
if (c.isPhantomClass()) {
return getPhantomClasses();
}
return null;
}
/****************************************************************************/
/**
Retrieves the active side-effect analysis
*/
public SideEffectAnalysis getSideEffectAnalysis()
{
// Lazily constructed from the current points-to analysis and call graph.
if(!hasSideEffectAnalysis()) {
setSideEffectAnalysis( new SideEffectAnalysis(
getPointsToAnalysis(),
getCallGraph() ) );
}
return activeSideEffectAnalysis;
}
/**
Sets the active side-effect analysis
*/
public void setSideEffectAnalysis(SideEffectAnalysis sea)
{
activeSideEffectAnalysis = sea;
}
/** True iff a side-effect analysis is currently cached. */
public boolean hasSideEffectAnalysis()
{
return activeSideEffectAnalysis != null;
}
/** Discards the cached side-effect analysis. */
public void releaseSideEffectAnalysis()
{
activeSideEffectAnalysis = null;
}
/****************************************************************************/
/**
Retrieves the active pointer analysis
*/
public PointsToAnalysis getPointsToAnalysis()
{
// Falls back to the conservative DumbPointerAnalysis when none is set;
// note: the fallback is NOT cached in activePointsToAnalysis.
if(!hasPointsToAnalysis()) {
return DumbPointerAnalysis.v();
}
return activePointsToAnalysis;
}
/**
Sets the active pointer analysis
*/
public void setPointsToAnalysis(PointsToAnalysis pa)
{
activePointsToAnalysis = pa;
}
/** True iff a points-to analysis has been explicitly set. */
public boolean hasPointsToAnalysis()
{
return activePointsToAnalysis != null;
}
/** Discards the cached points-to analysis. */
public void releasePointsToAnalysis()
{
activePointsToAnalysis = null;
}
/****************************************************************************/
/** Makes a new fast hierarchy if none is active, and returns the active
* fast hierarchy. */
public FastHierarchy getOrMakeFastHierarchy() {
if(!hasFastHierarchy() ) {
setFastHierarchy( new FastHierarchy() );
}
return getFastHierarchy();
}
/**
Retrieves the active fast hierarchy
*/
public FastHierarchy getFastHierarchy()
{
// Unlike getOrMakeFastHierarchy, this accessor does NOT build one lazily.
if(!hasFastHierarchy())
throw new RuntimeException("no active FastHierarchy present for scene");
return activeFastHierarchy;
}
/**
Sets the active fast hierarchy
*/
public void setFastHierarchy(FastHierarchy hierarchy)
{
activeFastHierarchy = hierarchy;
}
/** True iff a fast hierarchy is currently cached. */
public boolean hasFastHierarchy()
{
return activeFastHierarchy != null;
}
/** Discards the cached fast hierarchy. */
public void releaseFastHierarchy()
{
activeFastHierarchy = null;
}
/****************************************************************************/
/**
Retrieves the active hierarchy
*/
public Hierarchy getActiveHierarchy()
{
// Built lazily (the throwing behavior was deliberately disabled below).
if(!hasActiveHierarchy())
//throw new RuntimeException("no active Hierarchy present for scene");
setActiveHierarchy( new Hierarchy() );
return activeHierarchy;
}
/**
Sets the active hierarchy
*/
public void setActiveHierarchy(Hierarchy hierarchy)
{
activeHierarchy = hierarchy;
}
/** True iff a (slow) hierarchy is currently cached. */
public boolean hasActiveHierarchy()
{
return activeHierarchy != null;
}
/** Discards the cached hierarchy. */
public void releaseActiveHierarchy()
{
activeHierarchy = null;
}
/** Get the set of entry points that are used to build the call graph.
 *  Defaults to EntryPoints.v().all() when none have been set explicitly. */
public List<SootMethod> getEntryPoints() {
    if( entryPoints == null ) {
        entryPoints = EntryPoints.v().all();
    }
    return entryPoints;
}

/** Change the set of entry point methods used to build the call graph. */
public void setEntryPoints( List<SootMethod> entryPoints ) {
    this.entryPoints = entryPoints;
}
// The context-sensitive call graph, if one has been built (e.g. by Paddle).
private ContextSensitiveCallGraph cscg;

/**
 * Returns the context-sensitive call graph.
 * @throws RuntimeException if none has been built.
 */
public ContextSensitiveCallGraph getContextSensitiveCallGraph() {
    // Fixed typo in the message: "bulid" -> "build".
    if(cscg == null) throw new RuntimeException("No context-sensitive call graph present in Scene. You can build one with Paddle.");
    return cscg;
}

/** Sets the context-sensitive call graph. */
public void setContextSensitiveCallGraph(ContextSensitiveCallGraph cscg) {
    this.cscg = cscg;
}
/**
 * Returns the active call graph.
 * @throws RuntimeException if no call graph has been set (typically built
 *         in whole-program mode, -w).
 */
public CallGraph getCallGraph()
{
    if(!hasCallGraph()) {
        throw new RuntimeException( "No call graph present in Scene. Maybe you want Whole Program mode (-w)." );
    }
    return activeCallGraph;
}

/** Sets the active call graph; any cached reachable-methods set is invalidated. */
public void setCallGraph(CallGraph cg)
{
    reachableMethods = null;
    activeCallGraph = cg;
}

/** Returns true if a call graph is currently set. */
public boolean hasCallGraph()
{
    return activeCallGraph != null;
}

/** Discards the call graph and the reachable-methods set derived from it. */
public void releaseCallGraph()
{
    activeCallGraph = null;
    reachableMethods = null;
}

/**
 * Returns the set of methods reachable from the entry points, computing it
 * from the call graph on first use and bringing it up to date on each call.
 */
public ReachableMethods getReachableMethods() {
    if( reachableMethods == null ) {
        reachableMethods = new ReachableMethods(
                getCallGraph(), new ArrayList<MethodOrMethodContext>(getEntryPoints()) );
    }
    reachableMethods.update();
    return reachableMethods;
}

/** Sets the reachable-methods set. */
public void setReachableMethods( ReachableMethods rm ) {
    reachableMethods = rm;
}

/** Returns true if a reachable-methods set is currently cached. */
public boolean hasReachableMethods() {
    return reachableMethods != null;
}

/** Discards the cached reachable-methods set. */
public void releaseReachableMethods() {
    reachableMethods = null;
}
/**
 * Returns whether phantom references are allowed. Delegates to the
 * command-line option; the allowsPhantomRefs field is no longer consulted.
 */
public boolean getPhantomRefs()
{
    //if( !Options.v().allow_phantom_refs() ) return false;
    //return allowsPhantomRefs;
    return Options.v().allow_phantom_refs();
}

// NOTE(review): this setter writes allowsPhantomRefs, but getPhantomRefs()
// above ignores that field and reads only the Options flag.
public void setPhantomRefs(boolean value)
{
    allowsPhantomRefs = value;
}

/** Alias for getPhantomRefs(). */
public boolean allowsPhantomRefs()
{
    return getPhantomRefs();
}

// Accessors for the global numberers used to assign dense integer ids to
// the various Soot entities (kinds, types, methods, fields, ...).
public Numberer kindNumberer() { return kindNumberer; }
public ArrayNumberer getTypeNumberer() { return typeNumberer; }
public ArrayNumberer getMethodNumberer() { return methodNumberer; }
public Numberer getContextNumberer() { return contextNumberer; }
public Numberer getUnitNumberer() { return unitNumberer; }
public ArrayNumberer getFieldNumberer() { return fieldNumberer; }
public ArrayNumberer getClassNumberer() { return classNumberer; }
public StringNumberer getSubSigNumberer() { return subSigNumberer; }
public ArrayNumberer getLocalNumberer() { return localNumberer; }

/**
 * Sets the context numberer; may only be done once.
 * @throws RuntimeException if a context numberer is already set.
 */
public void setContextNumberer( Numberer n ) {
    if( contextNumberer != null )
        throw new RuntimeException(
                "Attempt to set context numberer when it is already set." );
    contextNumberer = n;
}
/**
 * Returns the {@link ThrowAnalysis} to be used by default when
 * constructing CFGs which include exceptional control flow. The choice is
 * driven by the -throw-analysis command-line option and cached after the
 * first call.
 *
 * @return the default {@link ThrowAnalysis}
 * @throws IllegalStateException if the option holds an unrecognized value
 */
public ThrowAnalysis getDefaultThrowAnalysis()
{
    if( defaultThrowAnalysis == null ) {
        int optionsThrowAnalysis = Options.v().throw_analysis();
        switch (optionsThrowAnalysis) {
        case Options.throw_analysis_pedantic:
            defaultThrowAnalysis = PedanticThrowAnalysis.v();
            break;
        case Options.throw_analysis_unit:
            defaultThrowAnalysis = UnitThrowAnalysis.v();
            break;
        default:
            // Fixed typo in the message ("throw_analysi()") and reuse the
            // value already read above instead of querying the option again.
            throw new IllegalStateException("Options.v().throw_analysis() == " +
                    optionsThrowAnalysis);
        }
    }
    return defaultThrowAnalysis;
}
/**
 * Sets the {@link ThrowAnalysis} to be used by default when
 * constructing CFGs which include exceptional control flow.
 *
 * @param ta the default {@link ThrowAnalysis}.
 */
public void setDefaultThrowAnalysis(ThrowAnalysis ta)
{
    defaultThrowAnalysis = ta;
}
/**
 * Populates the reserved-name set with every token that must be quoted
 * when used as an identifier: Jimple statement keywords, Java language
 * keywords, and a few Soot-internal tokens.
 */
private void setReservedNames()
{
    final String[] names = {
        "newarray", "newmultiarray", "nop", "ret", "specialinvoke",
        "staticinvoke", "tableswitch", "virtualinvoke", "null_type",
        "unknown", "cmp", "cmpg", "cmpl", "entermonitor", "exitmonitor",
        "interfaceinvoke", "lengthof", "lookupswitch", "neg", "if",
        "abstract", "annotation", "boolean", "break", "byte", "case",
        "catch", "char", "class", "final", "native", "public", "protected",
        "private", "static", "synchronized", "transient", "volatile",
        "interface", "void", "short", "int", "long", "float", "double",
        "extends", "implements", "breakpoint", "default", "goto",
        "instanceof", "new", "return", "throw", "throws", "null",
        "from", "to"
    };
    Set<String> reserved = getReservedNames();
    for (String name : names) {
        reserved.add(name);
    }
}
// Names of the basic classes Soot always needs, indexed by the resolving
// level (SootClass.HIERARCHY / SIGNATURES / BODIES) at which each must be
// resolved. Index 0 is unused.
private final Set<String>[] basicclasses=new Set[4];

// Registers the built-in classes that Soot itself depends on:
// java.lang.Object, the boxed primitive types, and the errors/exceptions
// the JVM can raise implicitly during execution.
private void addSootBasicClasses() {
    basicclasses[SootClass.HIERARCHY] = new HashSet<String>();
    basicclasses[SootClass.SIGNATURES] = new HashSet<String>();
    basicclasses[SootClass.BODIES] = new HashSet<String>();

    addBasicClass("java.lang.Object");
    addBasicClass("java.lang.Class", SootClass.SIGNATURES);

    addBasicClass("java.lang.Void", SootClass.SIGNATURES);
    addBasicClass("java.lang.Boolean", SootClass.SIGNATURES);
    addBasicClass("java.lang.Byte", SootClass.SIGNATURES);
    addBasicClass("java.lang.Character", SootClass.SIGNATURES);
    addBasicClass("java.lang.Short", SootClass.SIGNATURES);
    addBasicClass("java.lang.Integer", SootClass.SIGNATURES);
    addBasicClass("java.lang.Long", SootClass.SIGNATURES);
    addBasicClass("java.lang.Float", SootClass.SIGNATURES);
    addBasicClass("java.lang.Double", SootClass.SIGNATURES);

    addBasicClass("java.lang.String");
    addBasicClass("java.lang.StringBuffer", SootClass.SIGNATURES);

    addBasicClass("java.lang.Error");
    addBasicClass("java.lang.AssertionError", SootClass.SIGNATURES);
    addBasicClass("java.lang.Throwable", SootClass.SIGNATURES);
    addBasicClass("java.lang.NoClassDefFoundError", SootClass.SIGNATURES);
    addBasicClass("java.lang.ExceptionInInitializerError");
    addBasicClass("java.lang.RuntimeException");
    addBasicClass("java.lang.ClassNotFoundException");
    addBasicClass("java.lang.ArithmeticException");
    addBasicClass("java.lang.ArrayStoreException");
    addBasicClass("java.lang.ClassCastException");
    addBasicClass("java.lang.IllegalMonitorStateException");
    addBasicClass("java.lang.IndexOutOfBoundsException");
    addBasicClass("java.lang.ArrayIndexOutOfBoundsException");
    addBasicClass("java.lang.NegativeArraySizeException");
    addBasicClass("java.lang.NullPointerException");
    addBasicClass("java.lang.InstantiationError");
    addBasicClass("java.lang.InternalError");
    addBasicClass("java.lang.OutOfMemoryError");
    addBasicClass("java.lang.StackOverflowError");
    addBasicClass("java.lang.UnknownError");
    addBasicClass("java.lang.ThreadDeath");
    addBasicClass("java.lang.ClassCircularityError");
    addBasicClass("java.lang.ClassFormatError");
    addBasicClass("java.lang.IllegalAccessError");
    addBasicClass("java.lang.IncompatibleClassChangeError");
    addBasicClass("java.lang.LinkageError");
    addBasicClass("java.lang.VerifyError");
    addBasicClass("java.lang.NoSuchFieldError");
    addBasicClass("java.lang.AbstractMethodError");
    addBasicClass("java.lang.NoSuchMethodError");
    addBasicClass("java.lang.UnsatisfiedLinkError");

    addBasicClass("java.lang.Thread");
    addBasicClass("java.lang.Runnable");
    addBasicClass("java.lang.Cloneable");

    addBasicClass("java.io.Serializable");

    addBasicClass("java.lang.ref.Finalizer");
}

/** Registers name as a basic class to be resolved at HIERARCHY level. */
public void addBasicClass(String name) {
    addBasicClass(name,SootClass.HIERARCHY);
}

/** Registers name as a basic class to be resolved at the given level. */
public void addBasicClass(String name,int level) {
    basicclasses[level].add(name);
}
/** Load just the set of basic classes soot needs, ignoring those
 * specified on the command-line. You don't need to use both this and
 * loadNecessaryClasses, though it will only waste time.
 */
public void loadBasicClasses() {
    addReflectionTraceClasses();

    // Resolve from the deepest level (BODIES) down to HIERARCHY, so a class
    // requested at a deep level is not first resolved at a shallow one.
    for(int i=SootClass.BODIES;i>=SootClass.HIERARCHY;i--) {
        for(String name: basicclasses[i]) {
            tryLoadClass(name,i);
        }
    }
}
/**
 * Registers as basic classes (at BODIES level) every class that appears in
 * the reflection trace file configured through the cg phase's
 * reflection-log option. Each trace line is semicolon-separated:
 * "kind;target;source;...".
 *
 * @throws RuntimeException wrapping any parse/IO failure, with the
 *         offending line in the message.
 */
private void addReflectionTraceClasses() {
    CGOptions options = new CGOptions( PhaseOptions.v().getPhaseOptions("cg") );
    String log = options.reflection_log();
    Set<String> classNames = new HashSet<String>();
    if(log!=null && log.length()>0) {
        BufferedReader reader = null;
        String line="";
        try {
            reader = new BufferedReader(new InputStreamReader(new FileInputStream(log)));
            while((line=reader.readLine())!=null) {
                if(line.length()==0) continue;
                String[] portions = line.split(";");
                String kind = portions[0];
                String target = portions[1];
                String source = portions[2];
                // source is "pkg.Class.method(...)": strip the argument list,
                // then the method name, leaving the declaring class name.
                String classNameDotMethodName = source.substring(0,source.indexOf("("));
                String sourceClassName = classNameDotMethodName.substring(0,classNameDotMethodName.lastIndexOf("."));
                classNames.add(sourceClassName);
                if(kind.equals("Class.forName")) {
                    classNames.add(target);
                } else if(kind.equals("Class.newInstance")) {
                    classNames.add(target);
                } else if(kind.equals("Method.invoke") || kind.equals("Constructor.newInstance")) {
                    // For member reflection the target is a full Soot signature.
                    classNames.add(signatureToClass(target));
                } else throw new RuntimeException("Unknown entry kind: "+kind);
            }
        } catch (Exception e) {
            throw new RuntimeException("Line: '"+line+"'", e);
        } finally {
            // Fix: the reader was previously never closed, leaking the file
            // handle for the lifetime of the Scene.
            if (reader != null) {
                try {
                    reader.close();
                } catch (Exception ignored) {
                    // best-effort close; either the trace was fully read or
                    // the original exception is already propagating
                }
            }
        }
    }

    for (String c : classNames) {
        addBasicClass(c, SootClass.BODIES);
    }
}
// Classes loaded via the dynamic-class/dynamic-dir/dynamic-package options;
// populated by loadDynamicClasses().
private List<SootClass> dynamicClasses;

/**
 * Returns the dynamic classes.
 * @throws IllegalStateException if loadDynamicClasses() has not run yet.
 */
public Collection<SootClass> dynamicClasses() {
    if(dynamicClasses==null) {
        throw new IllegalStateException("Have to call loadDynamicClasses() first!");
    }
    return dynamicClasses;
}
/** Resolves the named class (with bodies) and marks it as an application class. */
private void loadNecessaryClass(String name) {
    loadClassAndSupport(name).setApplicationClass();
}
/** Load the set of classes that soot needs, including those specified on the
 * command-line. This is the standard way of initialising the list of
 * classes soot should use.
 */
public void loadNecessaryClasses() {
    loadBasicClasses();

    // Classes named explicitly on the command line.
    for (Iterator<String> it = Options.v().classes().iterator(); it.hasNext(); ) {
        loadNecessaryClass((String) it.next());
    }

    loadDynamicClasses();

    // Every class found under a -process-dir directory becomes an
    // application class as well.
    for (Iterator<String> pathIt = Options.v().process_dir().iterator(); pathIt.hasNext(); ) {
        final String path = (String) pathIt.next();
        for (String cl : SourceLocator.v().getClassesUnder(path)) {
            loadClassAndSupport(cl).setApplicationClass();
        }
    }

    prepareClasses();
    setDoneResolving();
}
/**
 * Loads all classes requested through the dynamic-class, dynamic-dir and
 * dynamic-package options, then drops any that turned out to be abstract
 * or interfaces.
 */
public void loadDynamicClasses() {
    dynamicClasses = new ArrayList<SootClass>();

    // Gather the names of all requested dynamic classes.
    HashSet<String> names = new HashSet<String>();
    names.addAll(Options.v().dynamic_class());

    for (Iterator<String> pathIt = Options.v().dynamic_dir().iterator(); pathIt.hasNext(); ) {
        names.addAll(SourceLocator.v().getClassesUnder((String) pathIt.next()));
    }

    for (Iterator<String> pkgIt = Options.v().dynamic_package().iterator(); pkgIt.hasNext(); ) {
        names.addAll(SourceLocator.v().classesInDynamicPackage((String) pkgIt.next()));
    }

    for (String className : names) {
        dynamicClasses.add(loadClassAndSupport(className));
    }

    // Remove non-concrete classes that may accidentally have been loaded.
    Iterator<SootClass> it = dynamicClasses.iterator();
    while (it.hasNext()) {
        SootClass c = it.next();
        if (!c.isConcrete()) {
            if (Options.v().verbose()) {
                G.v().out.println("Warning: dynamic class "+c.getName()+" is abstract or an interface, and it will not be considered.");
            }
            it.remove();
        }
    }
}
/* Generate classes to process, adding or removing package marked by
 * command line options.
 */
private void prepareClasses() {
    // Packages whose classes should be demoted to library classes.
    LinkedList<String> excludedPackages = new LinkedList<String>();
    if (Options.v().exclude() != null)
        excludedPackages.addAll(Options.v().exclude());

    if( !Options.v().include_all() ) {
        excludedPackages.add("java.");
        excludedPackages.add("sun.");
        excludedPackages.add("javax.");
        excludedPackages.add("com.sun.");
        excludedPackages.add("com.ibm.");
        excludedPackages.add("org.xml.");
        excludedPackages.add("org.w3c.");
        excludedPackages.add("org.apache.");
    }

    // Remove/add all classes from packageInclusionMask as per -i option.
    // Iterate to a fixed point: loadClassAndSupport below can pull new
    // classes into the Scene, which must themselves be processed.
    Chain<SootClass> processedClasses = new HashChain<SootClass>();
    while(true) {
        Chain<SootClass> unprocessedClasses = new HashChain<SootClass>(getClasses());
        unprocessedClasses.removeAll(processedClasses);
        if( unprocessedClasses.isEmpty() ) break;
        processedClasses.addAll(unprocessedClasses);
        for (SootClass s : unprocessedClasses) {
            if( s.isPhantom() ) continue;
            if(Options.v().app()) {
                s.setApplicationClass();
            }
            // Classes named on the command line always stay application classes.
            if (Options.v().classes().contains(s.getName())) {
                s.setApplicationClass();
                continue;
            }
            for( Iterator<String> pkgIt = excludedPackages.iterator(); pkgIt.hasNext(); ) {
                final String pkg = (String) pkgIt.next();
                if (s.isApplicationClass()
                && s.getPackageName().startsWith(pkg)) {
                    s.setLibraryClass();
                }
            }
            // -i includes override the exclusions applied just above.
            for( Iterator<String> pkgIt = Options.v().include().iterator(); pkgIt.hasNext(); ) {
                final String pkg = (String) pkgIt.next();
                if (s.getPackageName().startsWith(pkg))
                    s.setApplicationClass();
            }
            if(s.isApplicationClass()) {
                // make sure we have the support
                loadClassAndSupport(s.getName());
            }
        }
    }
}
// Package list; set and read externally via the accessors below.
ArrayList<String> pkgList;

/** Sets the package list. */
public void setPkgList(ArrayList<String> list){
    pkgList = list;
}

/** Returns the package list (may be null if never set). */
public ArrayList<String> getPkgList(){
    return pkgList;
}
/** Create an unresolved reference to a method. */
public SootMethodRef makeMethodRef(
        SootClass declaringClass,
        String name,
        List<Type> parameterTypes,
        Type returnType,
        boolean isStatic ) {
    return new SootMethodRefImpl(declaringClass, name, parameterTypes,
            returnType, isStatic);
}

/** Create an unresolved reference to a constructor (void return, non-static). */
public SootMethodRef makeConstructorRef(
        SootClass declaringClass,
        List<Type> parameterTypes) {
    return makeMethodRef(declaringClass, SootMethod.constructorName,
            parameterTypes, VoidType.v(), false );
}

/** Create an unresolved reference to a field. */
public SootFieldRef makeFieldRef(
        SootClass declaringClass,
        String name,
        Type type,
        boolean isStatic) {
    return new AbstractSootFieldRef(declaringClass, name, type, isStatic);
}
/** Returns the list of SootClasses that have been resolved at least to
 * the level specified. */
public List<SootClass> getClasses(int desiredLevel) {
    List<SootClass> result = new ArrayList<SootClass>();
    for (SootClass cl : getClasses()) {
        if (cl.resolvingLevel() >= desiredLevel) {
            result.add(cl);
        }
    }
    return result;
}
// True once class resolution has finished (set at the end of loadNecessaryClasses).
private boolean doneResolving = false;
// True while an incremental build is in progress; see isIncrementalBuild().
private boolean incrementalBuild;

public boolean doneResolving() { return doneResolving; }
public void setDoneResolving() { doneResolving = true; }
/**
 * Sets the main class from the -main-class option if one was given;
 * otherwise scans the application classes for one that declares a
 * standard main(String[]) method and infers it. Does nothing if a main
 * class is already set.
 */
public void setMainClassFromOptions() {
    if (mainClass != null)
        return;

    String mainFromOptions = Options.v().main_class();
    if (mainFromOptions != null && mainFromOptions.length() > 0) {
        setMainClass(getSootClass(mainFromOptions));
        return;
    }

    // Try to infer a main class if none is given.
    for (Iterator<SootClass> classIter = getApplicationClasses().iterator(); classIter.hasNext(); ) {
        SootClass c = (SootClass) classIter.next();
        if (c.declaresMethod("main", new SingletonList(ArrayType.v(RefType.v("java.lang.String"), 1)), VoidType.v())) {
            G.v().out.println("No main class given. Inferred '"+c.getName()+"' as main class.");
            setMainClass(c);
            break;
        }
    }
}
/**
 * This method returns true when in incremental build mode.
 * Other classes can query this flag and change the way in which they use the Scene,
 * depending on the flag's value.
 */
public boolean isIncrementalBuild() {
    return incrementalBuild;
}

/** Enters incremental build mode. */
public void initiateIncrementalBuild() {
    this.incrementalBuild = true;
}

/** Leaves incremental build mode. */
public void incrementalBuildFinished() {
    this.incrementalBuild = false;
}
}
/* ===== extraction artifact: a second copy of src/soot/Scene.java follows ===== */
/* Soot - a J*va Optimization Framework
* Copyright (C) 1997-1999 Raja Vallee-Rai
* Copyright (C) 2004 Ondrej Lhotak
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* Modified by the Sable Research Group and others 1997-1999.
* See the 'credits' file distributed with Soot for the complete list of
* contributors. (Soot is distributed at http://www.sable.mcgill.ca/soot)
*/
package soot;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import soot.jimple.toolkits.callgraph.CallGraph;
import soot.jimple.toolkits.callgraph.ContextSensitiveCallGraph;
import soot.jimple.toolkits.callgraph.ReachableMethods;
import soot.jimple.toolkits.pointer.DumbPointerAnalysis;
import soot.jimple.toolkits.pointer.SideEffectAnalysis;
import soot.options.CGOptions;
import soot.options.Options;
import soot.toolkits.exceptions.PedanticThrowAnalysis;
import soot.toolkits.exceptions.ThrowAnalysis;
import soot.toolkits.exceptions.UnitThrowAnalysis;
import soot.util.ArrayNumberer;
import soot.util.Chain;
import soot.util.HashChain;
import soot.util.MapNumberer;
import soot.util.Numberer;
import soot.util.SingletonList;
import soot.util.StringNumberer;
/** Manages the SootClasses of the application being analyzed. */
public class Scene //extends AbstractHost
{
/**
 * Constructs the singleton Scene: registers the reserved names, picks up
 * the soot.class.path system property if set, seeds the kind numberer with
 * all call-edge kinds, and registers the basic classes Soot always needs.
 */
public Scene ( Singletons.Global g )
{
    setReservedNames();

    // load soot.class.path system property, if defined
    String scp = System.getProperty("soot.class.path");
    if (scp != null)
        setSootClassPath(scp);

    // Pre-number every call-graph edge kind so their ids are stable.
    kindNumberer.add( Kind.INVALID );
    kindNumberer.add( Kind.STATIC );
    kindNumberer.add( Kind.VIRTUAL );
    kindNumberer.add( Kind.INTERFACE );
    kindNumberer.add( Kind.SPECIAL );
    kindNumberer.add( Kind.CLINIT );
    kindNumberer.add( Kind.THREAD );
    kindNumberer.add( Kind.FINALIZE );
    kindNumberer.add( Kind.INVOKE_FINALIZE );
    kindNumberer.add( Kind.PRIVILEGED );
    kindNumberer.add( Kind.NEWINSTANCE );

    addSootBasicClasses();
}
/** Returns the global Scene singleton. */
public static Scene v() { return G.v().soot_Scene (); }

// All classes managed by this Scene; the next three chains partition
// (together with phantom) the subset that has been categorized.
Chain<SootClass> classes = new HashChain<SootClass>();
Chain<SootClass> applicationClasses = new HashChain<SootClass>();
Chain<SootClass> libraryClasses = new HashChain<SootClass>();
Chain<SootClass> phantomClasses = new HashChain<SootClass>();

// Maps a class name to its RefType.
private final Map<String,Type> nameToClass = new HashMap<String,Type>();

// Numberers assigning dense integer ids to the various Soot entities.
ArrayNumberer kindNumberer = new ArrayNumberer();
ArrayNumberer typeNumberer = new ArrayNumberer();
ArrayNumberer methodNumberer = new ArrayNumberer();
Numberer unitNumberer = new MapNumberer();
Numberer contextNumberer = null;
ArrayNumberer fieldNumberer = new ArrayNumberer();
ArrayNumberer classNumberer = new ArrayNumberer();
StringNumberer subSigNumberer = new StringNumberer();
ArrayNumberer localNumberer = new ArrayNumberer();

// Lazily-built analyses; null until set/computed.
private Hierarchy activeHierarchy;
private FastHierarchy activeFastHierarchy;
private CallGraph activeCallGraph;
private ReachableMethods reachableMethods;
private PointsToAnalysis activePointsToAnalysis;
private SideEffectAnalysis activeSideEffectAnalysis;
private List<SootMethod> entryPoints;

boolean allowsPhantomRefs = false;

SootClass mainClass;
String sootClassPath = null;

// Two default values for constructing ExceptionalUnitGraphs:
private ThrowAnalysis defaultThrowAnalysis = null;
/**
 * Sets the main class of the analyzed program.
 *
 * @param m the class declaring "void main(java.lang.String[])"
 * @throws RuntimeException if m declares no such method
 */
public void setMainClass(SootClass m)
{
    // Fix: validate before mutating state, so a failed call does not leave
    // the Scene pointing at a class without a main method.
    if(!m.declaresMethod(Scene.v().getSubSigNumberer().findOrAdd( "void main(java.lang.String[])" ))) {
        throw new RuntimeException("Main-class has no main method!");
    }
    mainClass = m;
}
// Tokens that must be quoted when used as identifiers; see setReservedNames().
Set<String> reservedNames = new HashSet<String>();

/**
    Returns a set of tokens which are reserved.  Any field, class, method, or local variable with such a name will be quoted.
 */
public Set<String> getReservedNames()
{
    return reservedNames;
}

/**
    If this name is in the set of reserved names, then return a quoted version of it.  Else pass it through.
 */
public String quotedNameOf(String s)
{
    if(reservedNames.contains(s))
        return "\'" + s + "\'";
    else
        return s;
}
/**
 * Returns the main class, attempting to derive one from the command-line
 * options (or by inference) if none is set yet.
 * @throws RuntimeException if no main class can be determined.
 */
public SootClass getMainClass()
{
    if(mainClass == null) {
        setMainClassFromOptions();
    }
    if(mainClass == null)
        throw new RuntimeException("There is no main class set!");

    return mainClass;
}
/**
 * Returns the main(String[]) method of the main class.
 * @throws RuntimeException if no main class is set or it declares no main method.
 */
public SootMethod getMainMethod() {
    if (mainClass == null) {
        throw new RuntimeException("There is no main class set!");
    }
    // Build the String[] parameter list once and reuse it for both lookups.
    List argsType = new SingletonList(ArrayType.v(RefType.v("java.lang.String"), 1));
    if (!mainClass.declaresMethod("main", argsType, VoidType.v())) {
        throw new RuntimeException("Main class declares no main method!");
    }
    return mainClass.getMethod("main", argsType, VoidType.v());
}
/** Sets the Soot class path and invalidates any cached class-path state. */
public void setSootClassPath(String p)
{
    sootClassPath = p;
    SourceLocator.v().invalidateClassPath();
}

/**
 * Returns the Soot class path, computing and caching it on first use from
 * the -soot-classpath option and/or the default class path.
 */
public String getSootClassPath()
{
    if( sootClassPath == null ) {
        String optionscp = Options.v().soot_classpath();
        if( optionscp.length() > 0 )
            sootClassPath = optionscp;

        String defaultSootClassPath = defaultClassPath();

        //if no classpath is given on the command line, take the default
        if( sootClassPath == null ) {
            sootClassPath = defaultSootClassPath;
        } else {
            //if one is given...
            if(Options.v().prepend_classpath()) {
                //if the prepend flag is set, append the default classpath
                sootClassPath += File.pathSeparator + defaultSootClassPath;
            }
            //else, leave it as it is
        }
    }
    return sootClassPath;
}
/**
 * Builds the default class path: the JVM's own class path followed by the
 * current JRE's runtime libraries (classes.jar/ui.jar on Mac OS X, rt.jar
 * elsewhere), plus jce.jar in whole-program or Dava output mode.
 *
 * NOTE(review): the first append already ends with a path separator and
 * each branch appends another one, yielding a doubled separator; harmless
 * on common platforms, preserved here for byte-identical output — confirm
 * before changing.
 */
public String defaultClassPath() {
    // StringBuilder instead of StringBuffer: purely local buffer, so the
    // synchronized StringBuffer buys nothing.
    StringBuilder sb = new StringBuilder();
    sb.append(System.getProperty("java.class.path")+File.pathSeparator);

    if(System.getProperty("os.name").equals("Mac OS X")) {
        //in Mac OS X, rt.jar is split into classes.jar and ui.jar
        sb.append(File.pathSeparator);
        sb.append(System.getProperty("java.home"));
        sb.append(File.separator);
        sb.append("..");
        sb.append(File.separator);
        sb.append("Classes");
        sb.append(File.separator);
        sb.append("classes.jar");

        sb.append(File.pathSeparator);
        sb.append(System.getProperty("java.home"));
        sb.append(File.separator);
        sb.append("..");
        sb.append(File.separator);
        sb.append("Classes");
        sb.append(File.separator);
        sb.append("ui.jar");
    } else {
        sb.append(File.pathSeparator);
        sb.append(System.getProperty("java.home"));
        sb.append(File.separator);
        sb.append("lib");
        sb.append(File.separator);
        sb.append("rt.jar");
    }

    if(Options.v().whole_program() || Options.v().output_format()==Options.output_format_dava) {
        //add jce.jar, which is necessary for whole program mode
        //(java.security.Signature from rt.jar import javax.crypto.Cipher from jce.jar
        sb.append(File.pathSeparator+
                System.getProperty("java.home")+File.separator+"lib"+File.separator+"jce.jar");
    }

    return sb.toString();
}
// Monotonic counter bumped whenever the class hierarchy changes; lets
// clients detect that cached hierarchy-derived data is stale.
private int stateCount;
public int getState() { return this.stateCount; }

// Invalidates every analysis derived from the class hierarchy.
private void modifyHierarchy() {
    stateCount++;
    activeHierarchy = null;
    activeFastHierarchy = null;
    activeSideEffectAnalysis = null;
    activePointsToAnalysis = null;
}
/**
 * Adds the given class to this Scene as a library class and registers its
 * RefType. Invalidates hierarchy-derived analyses.
 *
 * @throws RuntimeException if the class is already managed or its name is
 *         already taken by another class in the Scene.
 */
public void addClass(SootClass c)
{
    if(c.isInScene())
        throw new RuntimeException("already managed: "+c.getName());

    if(containsClass(c.getName()))
        throw new RuntimeException("duplicate class: "+c.getName());

    classes.add(c);
    c.setLibraryClass();

    nameToClass.put(c.getName(), c.getType());
    c.getType().setSootClass(c);
    c.setInScene(true);
    modifyHierarchy();
}

/**
 * Removes the given class from this Scene and from whichever category
 * chain it was in. Invalidates hierarchy-derived analyses.
 *
 * @throws RuntimeException if the class is not managed by this Scene.
 */
public void removeClass(SootClass c)
{
    if(!c.isInScene())
        throw new RuntimeException();

    classes.remove(c);

    if(c.isLibraryClass()) {
        libraryClasses.remove(c);
    } else if(c.isPhantomClass()) {
        phantomClasses.remove(c);
    } else if(c.isApplicationClass()) {
        applicationClasses.remove(c);
    }

    c.getType().setSootClass(null);
    c.setInScene(false);
    modifyHierarchy();
}
/**
 * Returns true if a class with the given name is currently managed by
 * this Scene (its type is known, has a SootClass, and that class is in
 * the Scene).
 */
public boolean containsClass(String className)
{
    RefType type = (RefType) nameToClass.get(className);
    return type != null
        && type.hasSootClass()
        && type.getSootClass().isInScene();
}

/** Returns true if a RefType is registered under the given class name. */
public boolean containsType(String className)
{
    return nameToClass.containsKey(className);
}
/**
 * Extracts the declaring class name from a Soot signature of the form
 * "&lt;ClassName: subsignature&gt;".
 * @throws RuntimeException if sig is not of that form.
 */
public String signatureToClass(String sig) {
    if( sig.charAt(0) != '<' ) throw new RuntimeException("oops "+sig);
    if( sig.charAt(sig.length()-1) != '>' ) throw new RuntimeException("oops "+sig);
    int index = sig.indexOf( ":" );
    if( index < 0 ) throw new RuntimeException("oops "+sig);
    return sig.substring(1,index);
}

/**
 * Extracts the subsignature (everything after "ClassName: ") from a Soot
 * signature of the form "&lt;ClassName: subsignature&gt;".
 * @throws RuntimeException if sig is not of that form.
 */
public String signatureToSubsignature(String sig) {
    if( sig.charAt(0) != '<' ) throw new RuntimeException("oops "+sig);
    if( sig.charAt(sig.length()-1) != '>' ) throw new RuntimeException("oops "+sig);
    int index = sig.indexOf( ":" );
    if( index < 0 ) throw new RuntimeException("oops "+sig);
    // index+2 skips the colon and the following space; length()-1 drops '>'.
    return sig.substring(index+2,sig.length()-1);
}
// Looks up the field named by a full Soot signature; returns null if the
// declaring class is not in the Scene or does not declare the field.
private SootField grabField(String fieldSignature)
{
    String cname = signatureToClass( fieldSignature );
    String fname = signatureToSubsignature( fieldSignature );
    if( !containsClass(cname) ) return null;
    SootClass c = getSootClass(cname);
    if( !c.declaresField( fname ) ) return null;
    return c.getField( fname );
}

/** Returns true if the field named by the signature exists in the Scene. */
public boolean containsField(String fieldSignature)
{
    return grabField(fieldSignature) != null;
}

// Looks up the method named by a full Soot signature; returns null if the
// declaring class is not in the Scene or does not declare the method.
private SootMethod grabMethod(String methodSignature)
{
    String cname = signatureToClass( methodSignature );
    String mname = signatureToSubsignature( methodSignature );
    if( !containsClass(cname) ) return null;
    SootClass c = getSootClass(cname);
    if( !c.declaresMethod( mname ) ) return null;
    return c.getMethod( mname );
}

/** Returns true if the method named by the signature exists in the Scene. */
public boolean containsMethod(String methodSignature)
{
    return grabMethod(methodSignature) != null;
}
/**
 * Returns the field named by the given full Soot signature.
 * @throws RuntimeException if no such field exists in the Scene.
 */
public SootField getField(String fieldSignature)
{
    SootField field = grabField( fieldSignature );
    if (field == null)
        throw new RuntimeException("tried to get nonexistent field "+fieldSignature);
    return field;
}

/**
 * Returns the method named by the given full Soot signature.
 * @throws RuntimeException if no such method exists in the Scene.
 */
public SootMethod getMethod(String methodSignature)
{
    SootMethod method = grabMethod( methodSignature );
    if (method == null)
        throw new RuntimeException("tried to get nonexistent method "+methodSignature);
    return method;
}
/**
 * Attempts to load the given class and all of the required support classes.
 * Returns the original class if it was loaded, or null otherwise.
 */
public SootClass tryLoadClass(String className, int desiredLevel)
{
    /*
    if(Options.v().time())
        Main.v().resolveTimer.start();
    */

    // Phantom refs are temporarily enabled so that missing support classes
    // do not abort resolution.
    Scene.v().setPhantomRefs(true);
    //SootResolver resolver = new SootResolver();
    if( !getPhantomRefs()
    && SourceLocator.v().getClassSource(className) == null ) {
        Scene.v().setPhantomRefs(false);
        return null;
    }
    SootResolver resolver = SootResolver.v();
    SootClass toReturn = resolver.resolveClass(className, desiredLevel);
    Scene.v().setPhantomRefs(false);

    return toReturn;

    /*
    if(Options.v().time())
        Main.v().resolveTimer.end(); */
}

/**
 * Loads the given class and all of the required support classes.  Returns the first class.
 */
public SootClass loadClassAndSupport(String className)
{
    // Resolve signatures first; only concrete (non-phantom) classes are
    // then resolved to the BODIES level.
    SootClass ret = loadClass(className, SootClass.SIGNATURES);
    if( !ret.isPhantom() ) ret = loadClass(className, SootClass.BODIES);
    return ret;
}

/**
 * Loads the given class to (at least) the given resolving level.
 * Phantom refs are temporarily enabled during resolution.
 */
public SootClass loadClass(String className, int desiredLevel)
{
    /*
    if(Options.v().time())
        Main.v().resolveTimer.start();
    */

    Scene.v().setPhantomRefs(true);
    //SootResolver resolver = new SootResolver();
    SootResolver resolver = SootResolver.v();
    SootClass toReturn = resolver.resolveClass(className, desiredLevel);
    Scene.v().setPhantomRefs(false);

    return toReturn;

    /*
    if(Options.v().time())
        Main.v().resolveTimer.end(); */
}
/**
 * Returns the RefType with the given className.
 * @throws IllegalStateException if the RefType for this class cannot be found.
 * Use {@link #containsType(String)} to check if type is registered
 */
public RefType getRefType(String className)
{
    RefType refType = (RefType) nameToClass.get(className);
    if(refType==null) {
        throw new IllegalStateException("RefType "+className+" not loaded. " +
                "If you tried to get the RefType of a library class, did you call loadNecessaryClasses()? " +
                "Otherwise please check Soot's classpath.");
    }
    return refType;
}

/**
 * Returns the {@link RefType} for {@link Object}.
 */
public RefType getObjectType() {
    return getRefType("java.lang.Object");
}

/**
 * Registers the given RefType under its class name.
 */
public void addRefType(RefType type)
{
    nameToClass.put(type.getClassName(), type);
}
/**
 * Returns the SootClass with the given className. If the class is unknown
 * and phantom refs are allowed, a new phantom class is created and added
 * to the Scene; otherwise a RuntimeException aborts the lookup.
 */
public SootClass getSootClass(String className) {
    RefType type = (RefType) nameToClass.get(className);
    SootClass toReturn = null;
    if (type != null)
        toReturn = type.getSootClass();

    if (toReturn != null) {
        return toReturn;
    } else if (Scene.v().allowsPhantomRefs()) {
        SootClass c = new SootClass(className);
        c.setPhantom(true);
        addClass(c);
        return c;
    } else {
        throw new RuntimeException(System.getProperty("line.separator")
                + "Aborting: can't find classfile " + className);
    }
}
/**
 * Returns a backed chain of all the classes in this manager.
 */
public Chain<SootClass> getClasses()
{
    return classes;
}

/* The four following chains are mutually disjoint. */

/**
 * Returns a chain of the application classes in this scene.
 * These classes are the ones which can be freely analysed & modified.
 */
public Chain<SootClass> getApplicationClasses()
{
    return applicationClasses;
}

/**
 * Returns a chain of the library classes in this scene.
 * These classes can be analysed but not modified.
 */
public Chain<SootClass> getLibraryClasses()
{
    return libraryClasses;
}

/**
 * Returns a chain of the phantom classes in this scene.
 * These classes are referred to by other classes, but cannot be loaded.
 */
public Chain<SootClass> getPhantomClasses()
{
    return phantomClasses;
}
/**
 * Returns the chain (application, library, or phantom) that contains the
 * given class, or null if the class belongs to none of them.
 */
Chain<SootClass> getContainingChain(SootClass c)
{
    if (c.isApplicationClass()) {
        return getApplicationClasses();
    }
    if (c.isLibraryClass()) {
        return getLibraryClasses();
    }
    if (c.isPhantomClass()) {
        return getPhantomClasses();
    }
    return null;
}
/****************************************************************************/
/**
 * Retrieves the active side-effect analysis; if none is set yet, one is
 * lazily created from the current points-to analysis and call graph.
 */
public SideEffectAnalysis getSideEffectAnalysis()
{
if(!hasSideEffectAnalysis()) {
setSideEffectAnalysis( new SideEffectAnalysis(
getPointsToAnalysis(),
getCallGraph() ) );
}
return activeSideEffectAnalysis;
}
/**
 * Sets the active side-effect analysis.
 */
public void setSideEffectAnalysis(SideEffectAnalysis sea)
{
activeSideEffectAnalysis = sea;
}
/** Returns true when a side-effect analysis is currently set. */
public boolean hasSideEffectAnalysis()
{
return activeSideEffectAnalysis != null;
}
/** Discards the active side-effect analysis. */
public void releaseSideEffectAnalysis()
{
activeSideEffectAnalysis = null;
}
/****************************************************************************/
/**
 * Retrieves the active pointer analysis. When none is set, this returns
 * (but does not cache) the conservative DumbPointerAnalysis.
 */
public PointsToAnalysis getPointsToAnalysis()
{
if(!hasPointsToAnalysis()) {
return DumbPointerAnalysis.v();
}
return activePointsToAnalysis;
}
/**
 * Sets the active pointer analysis.
 */
public void setPointsToAnalysis(PointsToAnalysis pa)
{
activePointsToAnalysis = pa;
}
/** Returns true when a points-to analysis is currently set. */
public boolean hasPointsToAnalysis()
{
return activePointsToAnalysis != null;
}
/** Discards the active points-to analysis. */
public void releasePointsToAnalysis()
{
activePointsToAnalysis = null;
}
/****************************************************************************/
/** Makes a new fast hierarchy if none is active, and returns the active
 * fast hierarchy. */
public FastHierarchy getOrMakeFastHierarchy() {
if(!hasFastHierarchy() ) {
setFastHierarchy( new FastHierarchy() );
}
return getFastHierarchy();
}
/**
 * Retrieves the active fast hierarchy.
 * Throws when none is set; use getOrMakeFastHierarchy() to create one
 * on demand instead.
 */
public FastHierarchy getFastHierarchy()
{
if(!hasFastHierarchy())
throw new RuntimeException("no active FastHierarchy present for scene");
return activeFastHierarchy;
}
/**
 * Sets the active fast hierarchy.
 */
public void setFastHierarchy(FastHierarchy hierarchy)
{
activeFastHierarchy = hierarchy;
}
/** Returns true when a fast hierarchy is currently set. */
public boolean hasFastHierarchy()
{
return activeFastHierarchy != null;
}
/** Discards the active fast hierarchy. */
public void releaseFastHierarchy()
{
activeFastHierarchy = null;
}
/****************************************************************************/
/**
 * Retrieves the active hierarchy, lazily creating one when none is set.
 */
public Hierarchy getActiveHierarchy()
{
if(!hasActiveHierarchy())
setActiveHierarchy( new Hierarchy() );
return activeHierarchy;
}
/**
 * Sets the active hierarchy.
 */
public void setActiveHierarchy(Hierarchy hierarchy)
{
activeHierarchy = hierarchy;
}
/** Returns true when a hierarchy is currently set. */
public boolean hasActiveHierarchy()
{
return activeHierarchy != null;
}
/** Discards the active hierarchy. */
public void releaseActiveHierarchy()
{
activeHierarchy = null;
}
/** Get the set of entry points that are used to build the call graph.
 * Defaults to EntryPoints.v().all() when none have been set explicitly. */
public List<SootMethod> getEntryPoints() {
if( entryPoints == null ) {
entryPoints = EntryPoints.v().all();
}
return entryPoints;
}
/** Change the set of entry point methods used to build the call graph. */
public void setEntryPoints( List<SootMethod> entryPoints ) {
this.entryPoints = entryPoints;
}
/** The active context-sensitive call graph, if any. */
private ContextSensitiveCallGraph cscg;
/**
 * Returns the active context-sensitive call graph.
 * Throws when none has been set; one can be built with Paddle.
 */
public ContextSensitiveCallGraph getContextSensitiveCallGraph() {
    // Fixed typo in the user-facing message ("bulid" -> "build").
    if(cscg == null) throw new RuntimeException("No context-sensitive call graph present in Scene. You can build one with Paddle.");
    return cscg;
}
/** Sets the active context-sensitive call graph. */
public void setContextSensitiveCallGraph(ContextSensitiveCallGraph cscg) {
    this.cscg = cscg;
}
/**
 * Returns the active call graph. Throws when none has been set
 * (whole-program mode, -w, is required to build one).
 */
public CallGraph getCallGraph()
{
if(!hasCallGraph()) {
throw new RuntimeException( "No call graph present in Scene. Maybe you want Whole Program mode (-w)." );
}
return activeCallGraph;
}
/** Sets the active call graph and invalidates the cached reachable-method set. */
public void setCallGraph(CallGraph cg)
{
reachableMethods = null;
activeCallGraph = cg;
}
/** Returns true when a call graph is currently set. */
public boolean hasCallGraph()
{
return activeCallGraph != null;
}
/** Discards the call graph and the reachable-method set derived from it. */
public void releaseCallGraph()
{
activeCallGraph = null;
reachableMethods = null;
}
/** Returns the set of methods reachable from the entry points via the
 * current call graph, lazily building it and refreshing it on each call. */
public ReachableMethods getReachableMethods() {
if( reachableMethods == null ) {
reachableMethods = new ReachableMethods(
getCallGraph(), new ArrayList<MethodOrMethodContext>(getEntryPoints()) );
}
reachableMethods.update();
return reachableMethods;
}
/** Replaces the cached reachable-method set. */
public void setReachableMethods( ReachableMethods rm ) {
reachableMethods = rm;
}
/** Returns true when a reachable-method set is cached. */
public boolean hasReachableMethods() {
return reachableMethods != null;
}
/** Discards the cached reachable-method set. */
public void releaseReachableMethods() {
reachableMethods = null;
}
/** Returns whether phantom references are allowed; delegates to the
 * allow-phantom-refs command-line option. */
public boolean getPhantomRefs()
{
return Options.v().allow_phantom_refs();
}
/** Sets the phantom-refs flag. Note that getPhantomRefs() currently reads
 * the option directly and ignores this field. */
public void setPhantomRefs(boolean value)
{
allowsPhantomRefs = value;
}
/** Alias for getPhantomRefs(). */
public boolean allowsPhantomRefs()
{
return getPhantomRefs();
}
// Accessors for the global numberers assigning dense ids to Soot entities.
public Numberer kindNumberer() { return kindNumberer; }
public ArrayNumberer getTypeNumberer() { return typeNumberer; }
public ArrayNumberer getMethodNumberer() { return methodNumberer; }
public Numberer getContextNumberer() { return contextNumberer; }
public Numberer getUnitNumberer() { return unitNumberer; }
public ArrayNumberer getFieldNumberer() { return fieldNumberer; }
public ArrayNumberer getClassNumberer() { return classNumberer; }
public StringNumberer getSubSigNumberer() { return subSigNumberer; }
public ArrayNumberer getLocalNumberer() { return localNumberer; }
/** Sets the context numberer; may only be done once. */
public void setContextNumberer( Numberer n ) {
if( contextNumberer != null )
throw new RuntimeException(
"Attempt to set context numberer when it is already set." );
contextNumberer = n;
}
/**
 * Returns the {@link ThrowAnalysis} to be used by default when
 * constructing CFGs which include exceptional control flow. Lazily
 * resolved from the throw-analysis command-line option.
 *
 * @return the default {@link ThrowAnalysis}
 * @throws IllegalStateException when the option has an unrecognized value
 */
public ThrowAnalysis getDefaultThrowAnalysis()
{
    if( defaultThrowAnalysis == null ) {
        int optionsThrowAnalysis = Options.v().throw_analysis();
        switch (optionsThrowAnalysis) {
        case Options.throw_analysis_pedantic:
            defaultThrowAnalysis = PedanticThrowAnalysis.v();
            break;
        case Options.throw_analysis_unit:
            defaultThrowAnalysis = UnitThrowAnalysis.v();
            break;
        default:
            // Fixed typo in the message ("throw_analysi") and reuse the
            // value already read instead of querying the option again.
            throw new IllegalStateException("Options.v().throw_analysis() == " +
                                            optionsThrowAnalysis);
        }
    }
    return defaultThrowAnalysis;
}
/**
 * Sets the {@link ThrowAnalysis} to be used by default when
 * constructing CFGs which include exceptional control flow.
 *
 * @param ta the default {@link ThrowAnalysis}.
 */
public void setDefaultThrowAnalysis(ThrowAnalysis ta)
{
defaultThrowAnalysis = ta;
}
/**
 * Registers all Jimple keywords and Java reserved words in the scene's
 * reserved-name set, so that clashing identifiers can be escaped when
 * output is generated.
 */
private void setReservedNames()
{
    Set<String> rn = getReservedNames();
    final String[] reservedWords = {
        // Jimple-specific keywords.
        "newarray", "newmultiarray", "nop", "ret", "specialinvoke",
        "staticinvoke", "tableswitch", "virtualinvoke", "null_type",
        "unknown", "cmp", "cmpg", "cmpl", "entermonitor", "exitmonitor",
        "interfaceinvoke", "lengthof", "lookupswitch", "neg", "if",
        // Java reserved words and literals.
        "abstract", "annotation", "boolean", "break", "byte", "case",
        "catch", "char", "class", "final", "native", "public", "protected",
        "private", "static", "synchronized", "transient", "volatile",
        "interface", "void", "short", "int", "long", "float", "double",
        "extends", "implements", "breakpoint", "default", "goto",
        "instanceof", "new", "return", "throw", "throws", "null",
        "from", "to",
    };
    for (String word : reservedWords) {
        rn.add(word);
    }
}
// One name set per resolving level, indexed by the SootClass level
// constants (HIERARCHY, SIGNATURES, BODIES). Raw Set[] because generic
// arrays cannot be created directly.
private final Set<String>[] basicclasses=new Set[4];
/** Registers the classes Soot itself always needs, at the minimum
 * resolving level each one requires. */
private void addSootBasicClasses() {
basicclasses[SootClass.HIERARCHY] = new HashSet<String>();
basicclasses[SootClass.SIGNATURES] = new HashSet<String>();
basicclasses[SootClass.BODIES] = new HashSet<String>();
addBasicClass("java.lang.Object");
addBasicClass("java.lang.Class", SootClass.SIGNATURES);
addBasicClass("java.lang.Void", SootClass.SIGNATURES);
addBasicClass("java.lang.Boolean", SootClass.SIGNATURES);
addBasicClass("java.lang.Byte", SootClass.SIGNATURES);
addBasicClass("java.lang.Character", SootClass.SIGNATURES);
addBasicClass("java.lang.Short", SootClass.SIGNATURES);
addBasicClass("java.lang.Integer", SootClass.SIGNATURES);
addBasicClass("java.lang.Long", SootClass.SIGNATURES);
addBasicClass("java.lang.Float", SootClass.SIGNATURES);
addBasicClass("java.lang.Double", SootClass.SIGNATURES);
addBasicClass("java.lang.String");
addBasicClass("java.lang.StringBuffer", SootClass.SIGNATURES);
addBasicClass("java.lang.Error");
addBasicClass("java.lang.AssertionError", SootClass.SIGNATURES);
addBasicClass("java.lang.Throwable", SootClass.SIGNATURES);
addBasicClass("java.lang.NoClassDefFoundError", SootClass.SIGNATURES);
addBasicClass("java.lang.ExceptionInInitializerError");
addBasicClass("java.lang.RuntimeException");
addBasicClass("java.lang.ClassNotFoundException");
addBasicClass("java.lang.ArithmeticException");
addBasicClass("java.lang.ArrayStoreException");
addBasicClass("java.lang.ClassCastException");
addBasicClass("java.lang.IllegalMonitorStateException");
addBasicClass("java.lang.IndexOutOfBoundsException");
addBasicClass("java.lang.ArrayIndexOutOfBoundsException");
addBasicClass("java.lang.NegativeArraySizeException");
addBasicClass("java.lang.NullPointerException");
addBasicClass("java.lang.InstantiationError");
addBasicClass("java.lang.InternalError");
addBasicClass("java.lang.OutOfMemoryError");
addBasicClass("java.lang.StackOverflowError");
addBasicClass("java.lang.UnknownError");
addBasicClass("java.lang.ThreadDeath");
addBasicClass("java.lang.ClassCircularityError");
addBasicClass("java.lang.ClassFormatError");
addBasicClass("java.lang.IllegalAccessError");
addBasicClass("java.lang.IncompatibleClassChangeError");
addBasicClass("java.lang.LinkageError");
addBasicClass("java.lang.VerifyError");
addBasicClass("java.lang.NoSuchFieldError");
addBasicClass("java.lang.AbstractMethodError");
addBasicClass("java.lang.NoSuchMethodError");
addBasicClass("java.lang.UnsatisfiedLinkError");
addBasicClass("java.lang.Thread");
addBasicClass("java.lang.Runnable");
addBasicClass("java.lang.Cloneable");
addBasicClass("java.io.Serializable");
addBasicClass("java.lang.ref.Finalizer");
}
/** Registers a basic class at the default (HIERARCHY) resolving level. */
public void addBasicClass(String name) {
addBasicClass(name,SootClass.HIERARCHY);
}
/** Registers a basic class at the given resolving level. */
public void addBasicClass(String name,int level) {
basicclasses[level].add(name);
}
/** Load just the set of basic classes soot needs, ignoring those
 * specified on the command-line. You don't need to use both this and
 * loadNecessaryClasses, though it will only waste time.
 */
public void loadBasicClasses() {
addReflectionTraceClasses();
// Iterate from the deepest level (BODIES) down to HIERARCHY, so each
// class is loaded at the strongest level requested for it.
for(int i=SootClass.BODIES;i>=SootClass.HIERARCHY;i--) {
for(String name: basicclasses[i]) {
tryLoadClass(name,i);
}
}
}
/**
 * Registers, as basic classes at the BODIES level, every class mentioned
 * in the reflection trace file configured through the cg phase's
 * reflection-log option (if any). Each log line has the form
 * "kind;target;source;...".
 *
 * @throws RuntimeException when a line cannot be parsed or has an
 *         unknown entry kind (the offending line is included)
 */
private void addReflectionTraceClasses() {
    CGOptions options = new CGOptions( PhaseOptions.v().getPhaseOptions("cg") );
    String log = options.reflection_log();
    Set<String> classNames = new HashSet<String>();
    if(log!=null && log.length()>0) {
        String line="";
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new InputStreamReader(new FileInputStream(log)));
            while((line=reader.readLine())!=null) {
                if(line.length()==0) continue;
                String[] portions = line.split(";");
                String kind = portions[0];
                String target = portions[1];
                String source = portions[2];
                // source has the form "className.methodName(...)".
                String classNameDotMethodName = source.substring(0,source.indexOf("("));
                String sourceClassName = classNameDotMethodName.substring(0,classNameDotMethodName.lastIndexOf("."));
                classNames.add(sourceClassName);
                if(kind.equals("Class.forName")) {
                    classNames.add(target);
                } else if(kind.equals("Class.newInstance")) {
                    classNames.add(target);
                } else if(kind.equals("Method.invoke") || kind.equals("Constructor.newInstance")) {
                    classNames.add(signatureToClass(target));
                } else throw new RuntimeException("Unknown entry kind: "+kind);
            }
        } catch (Exception e) {
            throw new RuntimeException("Line: '"+line+"'", e);
        } finally {
            // Always release the file handle; the original code leaked it.
            if (reader != null) {
                try {
                    reader.close();
                } catch (Exception ignored) {
                    // Best effort on close; the data was already read.
                }
            }
        }
    }
    for (String c : classNames) {
        addBasicClass(c, SootClass.BODIES);
    }
}
// Populated by loadDynamicClasses(); null until then.
private List<SootClass> dynamicClasses;
/** Returns the classes loaded by loadDynamicClasses(); throws when that
 * method has not been called yet. */
public Collection<SootClass> dynamicClasses() {
if(dynamicClasses==null) {
throw new IllegalStateException("Have to call loadDynamicClasses() first!");
}
return dynamicClasses;
}
/** Loads the named class (with its supporting classes) and marks it as an
 * application class. */
private void loadNecessaryClass(String name) {
SootClass c;
c = Scene.v().loadClassAndSupport(name);
c.setApplicationClass();
}
/** Load the set of classes that soot needs, including those specified on the
 * command-line. This is the standard way of initialising the list of
 * classes soot should use.
 */
public void loadNecessaryClasses() {
loadBasicClasses();
Iterator<String> it = Options.v().classes().iterator();
while (it.hasNext()) {
String name = (String) it.next();
loadNecessaryClass(name);
}
loadDynamicClasses();
// Every class found under a -process-dir becomes an application class.
for( Iterator<String> pathIt = Options.v().process_dir().iterator(); pathIt.hasNext(); ) {
final String path = (String) pathIt.next();
for (String cl : SourceLocator.v().getClassesUnder(path)) {
Scene.v().loadClassAndSupport(cl).setApplicationClass();
}
}
prepareClasses();
setDoneResolving();
}
/** Loads the classes named by the dynamic-class, dynamic-dir and
 * dynamic-package options, then drops any that turn out not to be
 * concrete (abstract classes and interfaces). */
public void loadDynamicClasses() {
dynamicClasses = new ArrayList<SootClass>();
HashSet<String> dynClasses = new HashSet<String>();
dynClasses.addAll(Options.v().dynamic_class());
for( Iterator<String> pathIt = Options.v().dynamic_dir().iterator(); pathIt.hasNext(); ) {
final String path = (String) pathIt.next();
dynClasses.addAll(SourceLocator.v().getClassesUnder(path));
}
for( Iterator<String> pkgIt = Options.v().dynamic_package().iterator(); pkgIt.hasNext(); ) {
final String pkg = (String) pkgIt.next();
dynClasses.addAll(SourceLocator.v().classesInDynamicPackage(pkg));
}
for (String className : dynClasses) {
dynamicClasses.add( Scene.v().loadClassAndSupport(className) );
}
//remove non-concrete classes that may accidentally have been loaded
for (Iterator<SootClass> iterator = dynamicClasses.iterator(); iterator.hasNext();) {
SootClass c = iterator.next();
if(!c.isConcrete()) {
if(Options.v().verbose()) {
G.v().out.println("Warning: dynamic class "+c.getName()+" is abstract or an interface, and it will not be considered.");
}
iterator.remove();
}
}
}
/* Generate classes to process, adding or removing package marked by
 * command line options.
 */
private void prepareClasses() {
// Packages whose classes are demoted to library classes. Unless
// -include-all is given, common JDK/vendor namespaces are excluded.
LinkedList<String> excludedPackages = new LinkedList<String>();
if (Options.v().exclude() != null)
excludedPackages.addAll(Options.v().exclude());
if( !Options.v().include_all() ) {
excludedPackages.add("java.");
excludedPackages.add("sun.");
excludedPackages.add("javax.");
excludedPackages.add("com.sun.");
excludedPackages.add("com.ibm.");
excludedPackages.add("org.xml.");
excludedPackages.add("org.w3c.");
excludedPackages.add("org.apache.");
}
// Remove/add all classes from packageInclusionMask as per -i option
Chain<SootClass> processedClasses = new HashChain<SootClass>();
// Iterate to a fixed point: loadClassAndSupport below can add new
// classes to the Scene, which must themselves be processed.
while(true) {
Chain<SootClass> unprocessedClasses = new HashChain<SootClass>(Scene.v().getClasses());
unprocessedClasses.removeAll(processedClasses);
if( unprocessedClasses.isEmpty() ) break;
processedClasses.addAll(unprocessedClasses);
for (SootClass s : unprocessedClasses) {
if( s.isPhantom() ) continue;
if(Options.v().app()) {
s.setApplicationClass();
}
// Classes named on the command line are always application classes,
// regardless of the exclude/include lists.
if (Options.v().classes().contains(s.getName())) {
s.setApplicationClass();
continue;
}
for( Iterator<String> pkgIt = excludedPackages.iterator(); pkgIt.hasNext(); ) {
final String pkg = (String) pkgIt.next();
if (s.isApplicationClass()
&& s.getPackageName().startsWith(pkg)) {
s.setLibraryClass();
}
}
// Explicit -i includes override the exclusions applied above.
for( Iterator<String> pkgIt = Options.v().include().iterator(); pkgIt.hasNext(); ) {
final String pkg = (String) pkgIt.next();
if (s.getPackageName().startsWith(pkg))
s.setApplicationClass();
}
if(s.isApplicationClass()) {
// make sure we have the support
Scene.v().loadClassAndSupport(s.getName());
}
}
}
}
// Package list configurable by clients; null until set.
ArrayList<String> pkgList;
/** Sets the package list. */
public void setPkgList(ArrayList<String> list){
pkgList = list;
}
/** Returns the package list set via setPkgList, or null when unset. */
public ArrayList<String> getPkgList(){
return pkgList;
}
/** Create an unresolved reference to a method. */
public SootMethodRef makeMethodRef(
SootClass declaringClass,
String name,
List<Type> parameterTypes,
Type returnType,
boolean isStatic ) {
return new SootMethodRefImpl(declaringClass, name, parameterTypes,
returnType, isStatic);
}
/** Create an unresolved reference to a constructor. */
public SootMethodRef makeConstructorRef(
SootClass declaringClass,
List<Type> parameterTypes) {
return makeMethodRef(declaringClass, SootMethod.constructorName,
parameterTypes, VoidType.v(), false );
}
/** Create an unresolved reference to a field. */
public SootFieldRef makeFieldRef(
SootClass declaringClass,
String name,
Type type,
boolean isStatic) {
return new AbstractSootFieldRef(declaringClass, name, type, isStatic);
}
/** Returns the list of SootClasses that have been resolved at least to
 * the level specified. */
public List/*SootClass*/<SootClass> getClasses(int desiredLevel) {
    List<SootClass> resolved = new ArrayList<SootClass>();
    for (SootClass sc : getClasses()) {
        if (sc.resolvingLevel() >= desiredLevel) {
            resolved.add(sc);
        }
    }
    return resolved;
}
// True once class resolution has been finished for this scene.
private boolean doneResolving = false;
// True while an incremental build is in progress.
private boolean incrementalBuild;
/** Returns true once class resolution has been finished. */
public boolean doneResolving() { return doneResolving; }
/** Marks class resolution as finished. */
public void setDoneResolving() { doneResolving = true; }
/** Determines the main class from the main-class option or, failing that,
 * infers it as the first application class declaring a standard
 * "main(String[])" method. Does nothing when a main class is already set. */
public void setMainClassFromOptions() {
if(mainClass != null) return;
if( Options.v().main_class() != null
&& Options.v().main_class().length() > 0 ) {
setMainClass(getSootClass(Options.v().main_class()));
} else {
// try to infer a main class if none is given
for (Iterator<SootClass> classIter = getApplicationClasses().iterator(); classIter.hasNext();) {
SootClass c = (SootClass) classIter.next();
if (c.declaresMethod ("main", new SingletonList( ArrayType.v(RefType.v("java.lang.String"), 1) ), VoidType.v()))
{
G.v().out.println("No main class given. Inferred '"+c.getName()+"' as main class.");
setMainClass(c);
break;
}
}
}
}
/**
 * This method returns true when in incremental build mode.
 * Other classes can query this flag and change the way in which they use the Scene,
 * depending on the flag's value.
 */
public boolean isIncrementalBuild() {
return incrementalBuild;
}
/** Enters incremental build mode. */
public void initiateIncrementalBuild() {
this.incrementalBuild = true;
}
/** Leaves incremental build mode. */
public void incrementalBuildFinished() {
this.incrementalBuild = false;
}
}
|
no need for the indirection
|
src/soot/Scene.java
|
no need for the indirection
|
|
Java
|
lgpl-2.1
|
0e0bf6adae8bcf75c130c626b65e39ea296b3e63
| 0
|
pbondoer/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.xpn.xwiki.store.migration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.context.Execution;
import org.xwiki.context.ExecutionContext;
import com.xpn.xwiki.XWikiConfig;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
/**
* Template for {@link DataMigrationManager}.
*
* @version $Id$
* @since 3.4M1
*/
public abstract class AbstractDataMigrationManager implements DataMigrationManager, Initializable
{
/**
 * Component manager used to access stores and data migrations.
 */
@Inject
protected ComponentManager componentManager;
/**
 * Ordered list of migrators that may be applied. Set to null once the
 * migration process has run (it only happens once).
 */
protected Collection<XWikiMigration> migrations;
/**
 * Internal class used to find out the data migration that are being forced in the XWiki configuration file.
 */
protected class XWikiMigration
{
/**
 * True for a forced migration.
 */
public boolean isForced;
/**
 * The data migration.
 */
public DataMigration dataMigration;
/**
 * Build a new XWikiMigration.
 * @param dataMigration the data migration
 * @param isForced true when this migration is forced
 */
public XWikiMigration(DataMigration dataMigration, boolean isForced)
{
this.dataMigration = dataMigration;
this.isForced = isForced;
}
}
/**
 * Logger.
 */
@Inject
private Logger logger;
/**
 * Execution context used to access XWikiContext.
 */
@Inject
private Execution execution;
/**
 * Internal class used to prevent double checking of the database during migration operation.
 * This is a per-thread re-entrance counter, not a mutual-exclusion lock
 * between threads.
 */
private class ThreadLock extends ThreadLocal<Integer>
{
@Override
protected Integer initialValue()
{
return 0;
}
/**
 * Release the lock (decrements this thread's counter, never below zero).
 */
public void unlock()
{
int i = get();
if (i > 0) {
set(--i);
}
}
/**
 * Acquire the lock (increments this thread's counter; always succeeds).
 */
public void lock()
{
set(get() + 1);
}
/**
 * Try to acquire the lock.
 * @return true if the lock has been acquired; false when this thread
 * already holds it
 */
public boolean tryLock()
{
int i = get();
if (i > 0) {
return false;
}
set(++i);
return true;
}
}
/**
 * Semaphore to prevent re-entrance.
 */
private final ThreadLock lock = new ThreadLock();
/**
 * A cache of wiki database version, keyed by wiki database name.
 */
private final Map<String, XWikiDBVersion> versionCache = new HashMap<String, XWikiDBVersion>();
/**
 * The final database version when the migration process finishes.
 * This is use to compute the DBVersion of an empty store and quickly check the outdated status of existing DB
 */
private XWikiDBVersion targetVersion;
/**
 * Unified constructor for all subclasses.
 */
public AbstractDataMigrationManager()
{
}
/**
 * @return XWikiContext retrieved from the current execution context
 */
protected XWikiContext getXWikiContext()
{
ExecutionContext context = execution.getContext();
return (XWikiContext) context.getProperty("xwikicontext");
}
/**
 * @return XWikiConfig to read configuration from xwiki.cfg
 */
protected XWikiConfig getXWikiConfig()
{
return getXWikiContext().getWiki().getConfig();
}
/**
 * @return true if running in virtual mode (multiple wikis)
 */
protected boolean isVirtualMode()
{
return getXWikiContext().getWiki().isVirtualMode();
}
/**
 * @return list of virtual database names
 * @throws DataMigrationException on error
 */
protected List<String> getVirtualWikisDatabaseNames() throws DataMigrationException
{
try {
return getXWikiContext().getWiki().getVirtualWikisDatabaseNames(getXWikiContext());
} catch (XWikiException e) {
throw new DataMigrationException("Unable to retrieve the list of wiki names", e);
}
}
/**
 * @return the main XWiki database name
 */
protected String getMainXWiki()
{
return getXWikiContext().getMainXWiki();
}
@Override
public void initialize() throws InitializationException
{
try {
SortedMap<XWikiDBVersion, XWikiMigration> availableMigrations
= new TreeMap<XWikiDBVersion, XWikiMigration>();
// When migrations are forced in the configuration, only those run;
// otherwise all known migrations run, minus the explicitly ignored ones.
Map<XWikiDBVersion, XWikiMigration> forcedMigrations = getForcedMigrations();
if (!forcedMigrations.isEmpty()) {
availableMigrations.putAll(forcedMigrations);
} else {
Set<String> ignoredMigrations = new HashSet<String>(Arrays.asList(getXWikiConfig()
.getPropertyAsList("xwiki.store.migration.ignored")));
for (DataMigration migrator : getAllMigrations()) {
// Migrations may be ignored either by class name or by version.
if (ignoredMigrations.contains(migrator.getClass().getName())
|| ignoredMigrations.contains(migrator.getVersion().toString()))
{
continue;
}
XWikiMigration migration = new XWikiMigration(migrator, false);
availableMigrations.put(migrator.getVersion(), migration);
}
}
// The target version is the highest available migration version.
this.targetVersion = (availableMigrations.size() > 0) ? availableMigrations.lastKey()
: new XWikiDBVersion(0);
this.migrations = availableMigrations.values();
} catch (Exception e) {
throw new InitializationException("Migration Manager initialization failed", e);
}
}
/**
 * Read the forced data version from xwiki.cfg.
 *
 * @return the configured data version, or null when the
 *         "xwiki.store.migration.version" property is not set.
 */
protected XWikiDBVersion getDBVersionFromConfig()
{
    String configuredVersion = getXWikiConfig().getProperty("xwiki.store.migration.version");
    if (configuredVersion == null) {
        return null;
    }
    return new XWikiDBVersion(Integer.parseInt(configuredVersion));
}
/**
 * Read data version from database. The base implementation falls back to
 * the configured version; subclasses override this to query their store.
 * @return data version or null if this is a new database
 * @throws DataMigrationException in case of an unexpected error
 */
protected XWikiDBVersion getDBVersionFromDatabase() throws DataMigrationException
{
return getDBVersionFromConfig();
}
@Override
public final XWikiDBVersion getDBVersion() throws DataMigrationException
{
// The ThreadLock is a per-thread re-entrance counter; it does not
// provide mutual exclusion between threads.
lock.lock();
try {
String wikiName = getXWikiContext().getDatabase();
// NOTE(review): this read of versionCache (a plain HashMap) is not
// synchronized, while writes happen under two different monitors
// (this block's versionCache monitor here, "synchronized" on the
// manager in setDBVersion) — confirm the intended thread-safety.
XWikiDBVersion version = this.versionCache.get(wikiName);
if (version == null) {
synchronized (this.versionCache) {
version = getDBVersionFromDatabase();
if (version != null) {
this.versionCache.put(wikiName, version);
}
}
}
return version;
} finally {
lock.unlock();
}
}
@Override
public final XWikiDBVersion getLatestVersion() {
return this.targetVersion;
}
@Override
public synchronized void initNewDB() throws DataMigrationException {
// The per-thread lock prevents checkDatabase() from re-entering while
// the empty database is being initialized.
lock.lock();
try {
initializeEmptyDB();
} finally {
lock.unlock();
}
}
/**
 * Initialize a brand-new, empty database.
 * @throws DataMigrationException if any error
 */
protected abstract void initializeEmptyDB() throws DataMigrationException;
/**
 * Persist the given version in the store.
 * @param version to set
 * @throws DataMigrationException if any error
 */
protected abstract void setDBVersionToDatabase(XWikiDBVersion version) throws DataMigrationException;
/**
 * Persist the given version in the store and refresh the in-memory cache
 * for the current wiki.
 * @param version to set
 * @throws DataMigrationException if any error
 */
protected final synchronized void setDBVersion(XWikiDBVersion version) throws DataMigrationException
{
String wikiName = getXWikiContext().getDatabase();
setDBVersionToDatabase(version);
if (version != null) {
// NOTE(review): this cache write is guarded by "synchronized" on the
// manager, not by the versionCache monitor used in getDBVersion() —
// confirm the intended synchronization scheme.
this.versionCache.put(wikiName, version);
}
}
/**
 * Update database schema to the latest structure.
 * @param migrations the migration that will be executed (since 4.0M1)
 * @throws DataMigrationException if any error
 */
protected abstract void updateSchema(Collection<XWikiMigration> migrations) throws DataMigrationException;
@Override
public void checkDatabase() throws MigrationRequiredException, DataMigrationException
{
// Re-entrant calls from the same thread are no-ops.
if (!lock.tryLock()) {
return;
}
try {
XWikiDBVersion version;
// Retrieve DB version
try {
version = getDBVersion();
} catch (DataMigrationException e) {
String message = String.format(
"Database %s seems to be inaccessible, please check your configuration!",
getXWikiContext().getDatabase());
logger.error(message, e);
throw new DataMigrationException(message, e);
}
// Initialize new DB
if (version == null) {
try {
initNewDB();
version = getLatestVersion();
} catch (DataMigrationException e) {
String message = String.format(
"The empty database %s seems to be not writable, please check your configuration!",
getXWikiContext().getDatabase());
logger.error(message, e);
throw new DataMigrationException(message, e);
}
}
// Proceed with migration (only once)
if (this.migrations != null) {
try {
XWikiConfig config = getXWikiConfig();
// Migration must be enabled and schema update not disabled.
if ("1".equals(config.getProperty("xwiki.store.migration", "0"))
&& !"0".equals(config.getProperty("xwiki.store.hibernate.updateschema"))) {
// Run migrations
logger.info("Running storage schema updates and migrations");
startMigrations();
if ("1".equals(config.getProperty("xwiki.store.migration.exitAfterEnd", "0"))) {
logger.error("Exiting because xwiki.store.migration.exitAfterEnd is set");
System.exit(0);
}
version = getLatestVersion();
}
} finally {
// data migration are no more needed, migration only happen once
this.migrations = null;
}
}
// Prevent access to outdated DB
if (getLatestVersion().compareTo(version) > 0) {
String message = String.format(
"Database %s needs migration(s), it could not be safely used!", getXWikiContext().getDatabase());
logger.error(message);
throw new MigrationRequiredException(message);
}
} finally {
lock.unlock();
}
}
/**
 * Start the migration process. In virtual mode every configured database
 * is migrated (failures are counted and reported at the end); otherwise
 * only the main database is migrated.
 *
 * @throws DataMigrationException in case of any error
 */
protected synchronized void startMigrations() throws DataMigrationException
{
if (this.migrations == null) {
return;
}
XWikiContext context = getXWikiContext();
if (isVirtualMode()) {
// Save context values so that we can restore them as they were before the migration.
String currentDatabase = context.getDatabase();
String currentOriginalDatabase = context.getOriginalDatabase();
int errorCount = 0;
try {
for (String database : getDatabasesToMigrate()) {
logger.info("Starting migration for database [{}]...", database);
// Set up the context so that it points to the virtual wiki corresponding to the
// database.
context.setDatabase(database);
context.setOriginalDatabase(database);
try {
startMigrationsForDatabase();
} catch (DataMigrationException e) {
// Keep migrating the remaining databases; fail at the end.
errorCount++;
}
}
if (errorCount > 0) {
String message = String.format(
"%s database migration(s) failed, it is not safe to continue!", errorCount);
logger.error(message);
throw new DataMigrationException(message);
}
} finally {
context.setDatabase(currentDatabase);
context.setOriginalDatabase(currentOriginalDatabase);
}
} else {
// Just update schema and migrate the main wiki
try {
startMigrationsForDatabase();
} catch (DataMigrationException ex) {
String message = "Main database migration failed, it is not safe to continue!";
logger.error(message, ex);
throw new DataMigrationException(message, ex);
}
}
}
/**
 * Returns the names of the databases that should be migrated.
 * This is controlled through the "xwiki.store.migration.databases" configuration property in xwiki.cfg.
 * A value of "all" or no value at all will add all databases. Note that the main database is automatically added
 * even if not specified.
 *
 * @return The names of all databases to migrate.
 * @throws DataMigrationException if the list of wikis cannot be obtained.
 */
private Set<String> getDatabasesToMigrate() throws DataMigrationException
{
// LinkedHashSet preserves insertion order while removing duplicates.
Set<String> databasesToMigrate = new LinkedHashSet<String>();
// Always migrate the main database. We also want this to be the first database migrated so
// it has to be the
// first returned in the list.
databasesToMigrate.add(getMainXWiki());
// Add the databases listed by the user (if any). If there's no database name or
// a single database named and if it's "all" or "ALL" then automatically add all the registered databases.
if (isVirtualMode()) {
String[] databases =
getXWikiConfig().getPropertyAsList("xwiki.store.migration.databases");
if ((databases.length == 0) || ((databases.length == 1) && databases[0].equalsIgnoreCase("all"))) {
databasesToMigrate.addAll(getVirtualWikisDatabaseNames());
} else {
Collections.addAll(databasesToMigrate, databases);
}
}
return databasesToMigrate;
}
/**
 * It is assumed that before calling this method the XWiki context has been set with the
 * database to migrate.
 *
 * @throws DataMigrationException if there is an error updating the database.
 */
private void startMigrationsForDatabase() throws DataMigrationException
{
try {
// First bring the schema up to date, then run the data migrations.
Collection<XWikiMigration> neededMigrations = getNeededMigrations();
updateSchema(neededMigrations);
startMigrations(neededMigrations);
} catch (Exception e) {
String message = String.format("Failed to migrate database [%s]...", getXWikiContext().getDatabase());
logger.info(message, e);
throw new DataMigrationException(message, e);
}
}
/**
 * @return collection of {@link DataMigration} in ascending order, which need be
 * executed: forced migrations always run; others run only when their
 * version is newer than the current DB version and they elect to execute.
 * @throws DataMigrationException if any error
 */
protected Collection<XWikiMigration> getNeededMigrations() throws DataMigrationException
{
XWikiDBVersion curversion = getDBVersion();
Collection<XWikiMigration> neededMigrations = new ArrayList<XWikiMigration>();
for (XWikiMigration migration : this.migrations) {
if (migration.isForced || (migration.dataMigration.getVersion().compareTo(curversion) > 0
&& migration.dataMigration.shouldExecute(curversion)))
{
neededMigrations.add(migration);
}
}
if (logger.isInfoEnabled()) {
if (!neededMigrations.isEmpty()) {
logger.info("Current storage version = [{}]", curversion.toString());
logger.info("List of migrations that will be executed:");
for (XWikiMigration migration : neededMigrations) {
logger.info("  {} - {}{}", new String[] {migration.dataMigration.getName(),
migration.dataMigration.getDescription(),
(migration.isForced ? " (forced)" : "")});
}
} else {
logger.info("No storage migration required since current version is [{}]", curversion);
}
}
return neededMigrations;
}
/**
 * @return a map of forced {@link DataMigration} for this manager, looked up
 * by component hint from the "xwiki.store.migration.force" property
 * @throws DataMigrationException id any error
 */
protected Map<XWikiDBVersion, XWikiMigration> getForcedMigrations() throws DataMigrationException
{
SortedMap<XWikiDBVersion, XWikiMigration> forcedMigrations = new TreeMap<XWikiDBVersion, XWikiMigration>();
for (String hint : getXWikiConfig().getPropertyAsList("xwiki.store.migration.force")) {
try {
DataMigration dataMigration = componentManager.getInstance(DataMigration.class, hint);
forcedMigrations.put(dataMigration.getVersion(), new XWikiMigration(dataMigration, true));
} catch (ComponentLookupException e) {
throw new DataMigrationException("Forced dataMigration " + hint + " component could not be found", e);
}
}
return forcedMigrations;
}
/**
 * @param migrations run these migrations in the order of the collection
 * @throws DataMigrationException if any error occurs
 */
protected void startMigrations(Collection<XWikiMigration> migrations) throws DataMigrationException
{
    XWikiDBVersion curversion = getDBVersion();
    for (XWikiMigration migration : migrations) {
        if (logger.isInfoEnabled()) {
            logger.info("Running migration [{}] with version [{}]", migration.dataMigration.getName(),
                migration.dataMigration.getVersion());
        }
        migration.dataMigration.migrate();
        // Persist progress after each migration so an interrupted process can resume
        // from the highest version reached.
        if (migration.dataMigration.getVersion().compareTo(curversion) > 0) {
            curversion = migration.dataMigration.getVersion();
            setDBVersion(curversion);
            if (logger.isInfoEnabled()) {
                logger.info("New storage version is now [{}]", getDBVersion());
            }
        }
    }
    // If migration is launched on an empty DB, properly set the latest DB version
    if (curversion == null || getLatestVersion().compareTo(curversion) > 0) {
        setDBVersion(getLatestVersion());
        if (logger.isInfoEnabled() && curversion != null) {
            logger.info("Latest migration(s) was unneeded, storage now forced to latest version [{}]",
                getDBVersion());
        }
    }
}
/**
 * @return list of all {@link DataMigration} implementations known to this manager,
 *         regardless of whether they currently need to run
 * @throws DataMigrationException if any error occurs
 */
protected abstract List<? extends DataMigration> getAllMigrations() throws DataMigrationException;
}
|
xwiki-platform-core/xwiki-platform-oldcore/src/main/java/com/xpn/xwiki/store/migration/AbstractDataMigrationManager.java
|
/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.xpn.xwiki.store.migration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.xwiki.component.manager.ComponentLookupException;
import org.xwiki.component.manager.ComponentManager;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.context.Execution;
import org.xwiki.context.ExecutionContext;
import com.xpn.xwiki.XWikiConfig;
import com.xpn.xwiki.XWikiContext;
import com.xpn.xwiki.XWikiException;
/**
* Template for {@link DataMigrationManager}.
*
* @version $Id$
* @since 3.4M1
*/
public abstract class AbstractDataMigrationManager implements DataMigrationManager, Initializable
{
/**
* Component manager used to access stores and data migrations.
*/
@Inject
protected ComponentManager componentManager;
/**
* Ordered list of migrators that may be applied.
*/
protected Collection<XWikiMigration> migrations;
/**
 * Internal class used to find out the data migration that are being forced in the XWiki
 * configuration file. Pairs a {@link DataMigration} with a flag telling whether it was
 * explicitly forced by configuration (forced migrations run regardless of DB version).
 */
protected class XWikiMigration
{
    /**
     * True for a forced migration.
     */
    public boolean isForced;

    /**
     * The data migration.
     */
    public DataMigration dataMigration;

    /**
     * Build a new XWikiMigration.
     *
     * @param dataMigration the data migration
     * @param isForced true when this migration is forced
     */
    public XWikiMigration(DataMigration dataMigration, boolean isForced)
    {
        this.dataMigration = dataMigration;
        this.isForced = isForced;
    }
}
/**
* Logger.
*/
@Inject
private Logger logger;
/**
* Execution context used to access XWikiContext.
*/
@Inject
private Execution execution;
/**
 * Internal class used to prevent double checking of the database during migration operation.
 * Implemented as a per-thread re-entrant counter: the lock is considered held by the current
 * thread whenever its counter is above zero.
 */
private class ThreadLock extends ThreadLocal<Integer>
{
    @Override
    protected Integer initialValue()
    {
        // Every thread starts without holding the lock.
        return 0;
    }

    /**
     * Release the lock.
     */
    public void unlock()
    {
        int depth = get();
        if (depth > 0) {
            set(depth - 1);
        }
    }

    /**
     * Acquire the lock.
     */
    public void lock()
    {
        set(get() + 1);
    }

    /**
     * Try to acquire the lock.
     *
     * @return true if the lock has been acquired
     */
    public boolean tryLock()
    {
        if (get() > 0) {
            return false;
        }
        set(1);
        return true;
    }
}
/**
* Semaphore to prevent re-entrance.
*/
private final ThreadLock lock = new ThreadLock();
/**
* A cache of wiki database version.
*/
private final Map<String, XWikiDBVersion> versionCache = new HashMap<String, XWikiDBVersion>();
/**
* The final database version when the migration process finishes.
* This is use to compute the DBVersion of an empty store and quickly check the outdated status of existing DB
*/
private XWikiDBVersion targetVersion;
/**
 * Unified constructor for all subclasses. Dependencies are injected and real
 * initialization happens in {@link #initialize()}.
 */
public AbstractDataMigrationManager()
{
}
/**
 * @return the {@link XWikiContext} stored in the current execution context under the
 *         "xwikicontext" property
 */
protected XWikiContext getXWikiContext()
{
    return (XWikiContext) this.execution.getContext().getProperty("xwikicontext");
}
/**
 * @return XWikiConfig to read configuration from xwiki.cfg
 */
protected XWikiConfig getXWikiConfig()
{
    XWikiContext context = getXWikiContext();
    return context.getWiki().getConfig();
}
/**
 * @return true if running in virtual mode (a single farm hosting several wiki databases)
 */
protected boolean isVirtualMode()
{
    return getXWikiContext().getWiki().isVirtualMode();
}
/**
 * @return list of virtual database names registered in the farm
 * @throws DataMigrationException on error, wrapping the underlying {@link XWikiException}
 */
protected List<String> getVirtualWikisDatabaseNames() throws DataMigrationException
{
    try {
        return getXWikiContext().getWiki().getVirtualWikisDatabaseNames(getXWikiContext());
    } catch (XWikiException e) {
        throw new DataMigrationException("Unable to retrieve the list of wiki names", e);
    }
}
/**
 * @return the main XWiki database name
 */
protected String getMainXWiki()
{
    return getXWikiContext().getMainXWiki();
}
@Override
public void initialize() throws InitializationException
{
    try {
        SortedMap<XWikiDBVersion, XWikiMigration> availableMigrations
            = new TreeMap<XWikiDBVersion, XWikiMigration>();
        // Forced migrations (from xwiki.cfg) completely replace the normal migration set.
        Map<XWikiDBVersion, XWikiMigration> forcedMigrations = getForcedMigrations();
        if (!forcedMigrations.isEmpty()) {
            availableMigrations.putAll(forcedMigrations);
        } else {
            // Migrations may be ignored either by class name or by version string.
            Set<String> ignoredMigrations = new HashSet<String>(Arrays.asList(getXWikiConfig()
                .getPropertyAsList("xwiki.store.migration.ignored")));
            for (DataMigration migrator : getAllMigrations()) {
                if (ignoredMigrations.contains(migrator.getClass().getName())
                    || ignoredMigrations.contains(migrator.getVersion().toString()))
                {
                    continue;
                }
                XWikiMigration migration = new XWikiMigration(migrator, false);
                availableMigrations.put(migrator.getVersion(), migration);
            }
        }
        // The target version is the highest available migration version (0 when none).
        this.targetVersion = (availableMigrations.size() > 0) ? availableMigrations.lastKey()
            : new XWikiDBVersion(0);
        this.migrations = availableMigrations.values();
    } catch (Exception e) {
        throw new InitializationException("Migration Manager initialization failed", e);
    }
}
/**
 * Read the data version from xwiki.cfg.
 *
 * @return data version if set, or null. Note that a non-numeric value propagates the
 *         {@link NumberFormatException} thrown by {@link Integer#parseInt(String)}.
 */
protected XWikiDBVersion getDBVersionFromConfig()
{
    String ver = getXWikiConfig().getProperty("xwiki.store.migration.version");
    if (ver == null) {
        return null;
    }
    return new XWikiDBVersion(Integer.parseInt(ver));
}
/**
 * Read data version from database. This default implementation falls back to the
 * configuration file; storage-specific subclasses override it to query the store.
 *
 * @return data version or null if this is a new database
 * @throws DataMigrationException in case of an unexpected error
 */
protected XWikiDBVersion getDBVersionFromDatabase() throws DataMigrationException
{
    return getDBVersionFromConfig();
}
@Override
public final XWikiDBVersion getDBVersion() throws DataMigrationException
{
    // Re-entrant per-thread lock: avoid recursive DB checks while a migration is running.
    lock.lock();
    try {
        String wikiName = getXWikiContext().getDatabase();
        XWikiDBVersion version = this.versionCache.get(wikiName);
        if (version == null) {
            // Cache miss: read from the database under the cache monitor so concurrent
            // threads do not issue redundant queries for the same wiki.
            synchronized (this.versionCache) {
                version = getDBVersionFromDatabase();
                if (version != null) {
                    this.versionCache.put(wikiName, version);
                }
            }
        }
        // May be null for a brand-new (empty) database.
        return version;
    } finally {
        lock.unlock();
    }
}
/**
 * {@inheritDoc} Returns the target version computed during {@link #initialize()}.
 */
@Override
public final XWikiDBVersion getLatestVersion() {
    return this.targetVersion;
}
/**
 * {@inheritDoc} Delegates to {@link #initializeEmptyDB()} under the re-entrant lock.
 */
@Override
public synchronized void initNewDB() throws DataMigrationException {
    lock.lock();
    try {
        initializeEmptyDB();
    } finally {
        lock.unlock();
    }
}
/**
 * Initialize a brand-new (empty) database for the current wiki.
 *
 * @throws DataMigrationException if any error occurs
 */
protected abstract void initializeEmptyDB() throws DataMigrationException;
/**
 * Persist the given data version into the current database.
 *
 * @param version to set
 * @throws DataMigrationException if any error occurs
 */
protected abstract void setDBVersionToDatabase(XWikiDBVersion version) throws DataMigrationException;
/**
 * Persist the given data version and keep the in-memory cache in sync.
 *
 * @param version to set
 * @throws DataMigrationException if any error occurs
 */
protected final synchronized void setDBVersion(XWikiDBVersion version) throws DataMigrationException
{
    String wikiName = getXWikiContext().getDatabase();
    setDBVersionToDatabase(version);
    // Only cache non-null versions; a null version means "unknown / empty DB".
    if (version != null) {
        this.versionCache.put(wikiName, version);
    }
}
/**
 * Update database schema to the latest structure.
 *
 * @param migrations the migrations that will be executed (since 4.0M1)
 * @throws DataMigrationException if any error occurs
 */
protected abstract void updateSchema(Collection<XWikiMigration> migrations) throws DataMigrationException;
/**
 * {@inheritDoc} Checks (once per process) that the current database is initialized and
 * up to date, running migrations if configuration allows, and refuses access to an
 * outdated database otherwise.
 */
@Override
public void checkDatabase() throws MigrationRequiredException, DataMigrationException
{
    // Re-entrant guard: if this thread is already inside a check/migration, skip.
    if (!lock.tryLock()) {
        return;
    }
    try {
        XWikiDBVersion version;

        // Retrieve DB version
        try {
            version = getDBVersion();
        } catch (DataMigrationException e) {
            String message = String.format(
                "Database %s seems to be inaccessible, please check your configuration!",
                getXWikiContext().getDatabase());
            logger.error(message, e);
            throw new DataMigrationException(message, e);
        }

        // Initialize new DB
        if (version == null) {
            try {
                initNewDB();
                version = getLatestVersion();
            } catch (DataMigrationException e) {
                String message = String.format(
                    "The empty database %s seems to be not writable, please check your configuration!",
                    getXWikiContext().getDatabase());
                logger.error(message, e);
                throw new DataMigrationException(message, e);
            }
        }

        // Proceed with migration (only once)
        if (this.migrations != null) {
            try {
                XWikiConfig config = getXWikiConfig();
                if ("1".equals(config.getProperty("xwiki.store.migration", "0"))
                    && !"0".equals(config.getProperty("xwiki.store.hibernate.updateschema"))) {
                    // Run migrations
                    logger.info("Running storage schema updates and migrations");
                    startMigrations();
                    // NOTE: intentional JVM exit when exitAfterEnd is configured, used to
                    // run migrations as a one-shot operation.
                    if ("1".equals(config.getProperty("xwiki.store.migration.exitAfterEnd", "0"))) {
                        logger.error("Exiting because xwiki.store.migration.exitAfterEnd is set");
                        System.exit(0);
                    }
                    version = getLatestVersion();
                }
            } finally {
                // data migration are no more needed, migration only happen once
                this.migrations = null;
            }
        }

        // Prevent access to outdated DB
        if (getLatestVersion().compareTo(version) > 0) {
            String message = String.format(
                "Database %s needs migration(s), it could not be safely used!", getXWikiContext().getDatabase());
            logger.error(message);
            throw new MigrationRequiredException(message);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Start the migration process. In virtual mode every configured database is migrated in
 * turn; otherwise only the main database is migrated.
 *
 * @throws DataMigrationException in case of any error
 */
protected synchronized void startMigrations() throws DataMigrationException
{
    if (this.migrations == null) {
        return;
    }

    XWikiContext context = getXWikiContext();
    if (isVirtualMode()) {
        // Save context values so that we can restore them as they were before the migration.
        String currentDatabase = context.getDatabase();
        String currentOriginalDatabase = context.getOriginalDatabase();

        int errorCount = 0;
        try {
            for (String database : getDatabasesToMigrate()) {
                logger.info("Starting migration for database [{}]...", database);
                // Set up the context so that it points to the virtual wiki corresponding to the
                // database.
                context.setDatabase(database);
                context.setOriginalDatabase(database);
                try {
                    startMigrationsForDatabase();
                } catch (DataMigrationException e) {
                    // Deliberately keep going: the failure was already logged inside
                    // startMigrationsForDatabase(); just count it and fail at the end.
                    errorCount++;
                }
            }
            if (errorCount > 0) {
                String message = String.format(
                    "%s database migration(s) failed, it is not safe to continue!", errorCount);
                logger.error(message);
                throw new DataMigrationException(message);
            }
        } finally {
            // Always restore the context, even if a migration aborted the loop.
            context.setDatabase(currentDatabase);
            context.setOriginalDatabase(currentOriginalDatabase);
        }
    } else {
        // Just update schema and migrate the main wiki
        try {
            startMigrationsForDatabase();
        } catch (DataMigrationException ex) {
            String message = "Main database migration failed, it is not safe to continue!";
            logger.error(message, ex);
            throw new DataMigrationException(message, ex);
        }
    }
}
/**
 * Returns the names of the databases that should be migrated.
 * This is controlled through the "xwiki.store.migration.databases" configuration property in xwiki.cfg.
 * A value of "all" or no value at all will add all databases. Note that the main database is automatically added
 * even if not specified.
 *
 * @return The names of all databases to migrate, main database first.
 * @throws DataMigrationException if the list of wikis cannot be obtained.
 */
private Set<String> getDatabasesToMigrate() throws DataMigrationException
{
    // LinkedHashSet: preserves insertion order while de-duplicating names.
    Set<String> databasesToMigrate = new LinkedHashSet<String>();

    // Always migrate the main database. We also want this to be the first database migrated so
    // it has to be the
    // first returned in the list.
    databasesToMigrate.add(getMainXWiki());

    // Add the databases listed by the user (if any). If there's no database name or
    // a single database named and if it's "all" or "ALL" then automatically add all the registered databases.
    if (isVirtualMode()) {
        String[] databases =
            getXWikiConfig().getPropertyAsList("xwiki.store.migration.databases");
        if ((databases.length == 0) || ((databases.length == 1) && databases[0].equalsIgnoreCase("all"))) {
            databasesToMigrate.addAll(getVirtualWikisDatabaseNames());
        } else {
            Collections.addAll(databasesToMigrate, databases);
        }
    }

    return databasesToMigrate;
}
/**
 * It is assumed that before calling this method the XWiki context has been set with the
 * database to migrate.
 *
 * @throws DataMigrationException if there is an error updating the database.
 */
private void startMigrationsForDatabase() throws DataMigrationException
{
    try {
        Collection<XWikiMigration> neededMigrations = getNeededMigrations();
        updateSchema(neededMigrations);
        startMigrations(neededMigrations);
    } catch (Exception e) {
        String message = String.format("Failed to migrate database [%s]...", getXWikiContext().getDatabase());
        // NOTE(review): a migration failure is logged at info level here while similar
        // failures elsewhere use error level — confirm whether this is intentional.
        logger.info(message, e);
        throw new DataMigrationException(message, e);
    }
}
/**
 * Computes, in ascending version order, the data migrations that still have to run
 * against the current database: forced migrations always qualify, others qualify when
 * their version is newer than the current one and they agree to execute.
 *
 * @return collection of {@link DataMigration} in ascending order, which need to be
 *         executed
 * @throws DataMigrationException if any error occurs
 */
protected Collection<XWikiMigration> getNeededMigrations() throws DataMigrationException
{
    XWikiDBVersion currentVersion = getDBVersion();
    Collection<XWikiMigration> result = new ArrayList<XWikiMigration>();
    for (XWikiMigration candidate : this.migrations) {
        // Short-circuit: forced migrations skip the version comparison entirely.
        if (candidate.isForced
            || (candidate.dataMigration.getVersion().compareTo(currentVersion) > 0
                && candidate.dataMigration.shouldExecute(currentVersion)))
        {
            result.add(candidate);
        }
    }
    if (logger.isInfoEnabled()) {
        if (result.isEmpty()) {
            logger.info("No storage migration required since current version is [{}]", currentVersion);
        } else {
            logger.info("Current storage version = [{}]", currentVersion.toString());
            logger.info("List of migrations that will be executed:");
            for (XWikiMigration candidate : result) {
                logger.info(" {} - {}{}", new String[] {candidate.dataMigration.getName(),
                    candidate.dataMigration.getDescription(),
                    (candidate.isForced ? " (forced)" : "")});
            }
        }
    }
    return result;
}
/**
 * @return a map of forced {@link DataMigration} for this manager, keyed and sorted by
 *         migration version
 * @throws DataMigrationException if any error occurs
 */
protected Map<XWikiDBVersion, XWikiMigration> getForcedMigrations() throws DataMigrationException
{
    SortedMap<XWikiDBVersion, XWikiMigration> forcedMigrations = new TreeMap<XWikiDBVersion, XWikiMigration>();
    // Each configured hint names a DataMigration component that must run regardless of version.
    for (String hint : getXWikiConfig().getPropertyAsList("xwiki.store.migration.force")) {
        try {
            // getInstance() replaces the deprecated ComponentManager.lookup() API.
            DataMigration dataMigration = componentManager.getInstance(DataMigration.class, hint);
            forcedMigrations.put(dataMigration.getVersion(), new XWikiMigration(dataMigration, true));
        } catch (ComponentLookupException e) {
            throw new DataMigrationException("Forced dataMigration " + hint + " component could not be found", e);
        }
    }
    return forcedMigrations;
}
/**
 * @param migrations run these migrations in the order of the collection
 * @throws DataMigrationException if any error occurs
 */
protected void startMigrations(Collection<XWikiMigration> migrations) throws DataMigrationException
{
    XWikiDBVersion curversion = getDBVersion();
    for (XWikiMigration migration : migrations) {
        if (logger.isInfoEnabled()) {
            logger.info("Running migration [{}] with version [{}]", migration.dataMigration.getName(),
                migration.dataMigration.getVersion());
        }
        migration.dataMigration.migrate();
        // Persist progress after each migration so an interrupted process can resume
        // from the highest version reached.
        if (migration.dataMigration.getVersion().compareTo(curversion) > 0) {
            curversion = migration.dataMigration.getVersion();
            setDBVersion(curversion);
            if (logger.isInfoEnabled()) {
                logger.info("New storage version is now [{}]", getDBVersion());
            }
        }
    }
    // If migration is launched on an empty DB, properly set the latest DB version
    if (curversion == null || getLatestVersion().compareTo(curversion) > 0) {
        setDBVersion(getLatestVersion());
        if (logger.isInfoEnabled() && curversion != null) {
            logger.info("Latest migration(s) was unneeded, storage now forced to latest version [{}]",
                getDBVersion());
        }
    }
}
/**
 * @return list of all {@link DataMigration} implementations known to this manager,
 *         regardless of whether they currently need to run
 * @throws DataMigrationException if any error occurs
 */
protected abstract List<? extends DataMigration> getAllMigrations() throws DataMigrationException;
}
|
[Misc] Removed more deprecated lookup() calls in favor of getInstance()
|
xwiki-platform-core/xwiki-platform-oldcore/src/main/java/com/xpn/xwiki/store/migration/AbstractDataMigrationManager.java
|
[Misc] Removed more deprecated lookup() calls in favor of getInstance()
|
|
Java
|
apache-2.0
|
6d6d0a38880817aac197202930bf5b3308a3cfc8
| 0
|
debezium/debezium,debezium/debezium,debezium/debezium,jpechane/debezium,jpechane/debezium,debezium/debezium,jpechane/debezium,jpechane/debezium
|
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.connector.sqlserver;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_LENGTH_PARAMETER_KEY;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_NAME_PARAMETER_KEY;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_SCALE_PARAMETER_KEY;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.MapAssert.entry;
import static org.junit.Assert.assertNull;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.awaitility.Awaitility;
import org.fest.assertions.Assertions;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import io.debezium.config.Configuration;
import io.debezium.connector.sqlserver.SqlServerConnectorConfig.SnapshotMode;
import io.debezium.connector.sqlserver.util.TestHelper;
import io.debezium.data.Envelope;
import io.debezium.data.SchemaAndValueField;
import io.debezium.data.SourceRecordAssert;
import io.debezium.data.VerifyRecord;
import io.debezium.doc.FixFor;
import io.debezium.embedded.AbstractConnectorTest;
import io.debezium.junit.logging.LogInterceptor;
import io.debezium.relational.RelationalDatabaseConnectorConfig;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.relational.history.DatabaseHistoryException;
import io.debezium.relational.history.DatabaseHistoryListener;
import io.debezium.relational.history.FileDatabaseHistory;
import io.debezium.relational.history.HistoryRecordComparator;
import io.debezium.relational.history.TableChanges;
import io.debezium.schema.DatabaseSchema;
import io.debezium.util.Testing;
/**
* Integration test for the Debezium SQL Server connector.
*
* @author Jiri Pechanec
*/
public class SqlServerConnectorIT extends AbstractConnectorTest {
private SqlServerConnection connection;
/**
 * Creates a fresh test database with two CDC-enabled tables and one pre-existing row in
 * {@code tablea} (captured by the snapshot phase of each test).
 */
@Before
public void before() throws SQLException {
    TestHelper.createTestDatabase();
    connection = TestHelper.testConnection();
    connection.execute(
            "CREATE TABLE tablea (id int primary key, cola varchar(30))",
            "CREATE TABLE tableb (id int primary key, colb varchar(30))",
            "INSERT INTO tablea VALUES(1, 'a')");
    TestHelper.enableTableCdc(connection, "tablea");
    TestHelper.enableTableCdc(connection, "tableb");

    initializeConnectorTestFramework();
    // Remove any database-history file left over from a previous run.
    Testing.Files.delete(TestHelper.DB_HISTORY_PATH);
    // Testing.Print.enable();
}
/**
 * Closes the test connection, if one was opened.
 */
@After
public void after() throws SQLException {
    if (connection == null) {
        return;
    }
    connection.close();
}
/**
 * Verifies that inserts into both tables produce create events (after populated, before
 * null), and that deletes produce a delete event followed by a tombstone per row.
 */
@Test
public void createAndDelete() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        // The casts double as assertions that keys/values are Structs.
        final Struct keyA = (Struct) recordA.key();
        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));

        final Struct keyB = (Struct) recordB.key();
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }

    connection.execute("DELETE FROM tableB");
    // Each deleted row emits two records: the delete event and its tombstone.
    final SourceRecords deleteRecords = consumeRecordsByTopic(2 * RECORDS_PER_TABLE);
    final List<SourceRecord> deleteTableA = deleteRecords.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> deleteTableB = deleteRecords.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(deleteTableA).isNullOrEmpty();
    Assertions.assertThat(deleteTableB).hasSize(2 * RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord deleteRecord = deleteTableB.get(i * 2);
        final SourceRecord tombstoneRecord = deleteTableB.get(i * 2 + 1);
        final List<SchemaAndValueField> expectedDeleteRow = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct deleteKey = (Struct) deleteRecord.key();
        final Struct deleteValue = (Struct) deleteRecord.value();
        assertRecord((Struct) deleteValue.get("before"), expectedDeleteRow);
        assertNull(deleteValue.get("after"));

        final Struct tombstoneKey = (Struct) tombstoneRecord.key();
        final Struct tombstoneValue = (Struct) tombstoneRecord.value();
        assertNull(tombstoneValue);
    }

    stopConnector();
}
/**
 * Verifies that the connector streams correctly when connecting with
 * ApplicationIntent=ReadOnly, and that streaming uses multiple short-lived transactions
 * rather than one long transaction.
 */
@Test
@FixFor("DBZ-1642")
public void readOnlyApplicationIntent() throws Exception {
    final LogInterceptor logInterceptor = new LogInterceptor();
    // Unique application name so the session can be located in sys.dm_exec_sessions below.
    final String appId = "readOnlyApplicationIntent-" + UUID.randomUUID();
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with("database.applicationIntent", "ReadOnly")
            .with("database.applicationName", appId)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    TestHelper.waitForSnapshotToBeCompleted();
    consumeRecordsByTopic(1);

    TestHelper.waitForStreamingStarted();
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES, 24);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct keyA = (Struct) recordA.key();
        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));

        final Struct keyB = (Struct) recordB.key();
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
    assertThat(logInterceptor.containsMessage("Schema locking was disabled in connector configuration")).isTrue();

    // Verify that multiple subsequent transactions are used in streaming phase with read-only intent
    try (final SqlServerConnection admin = TestHelper.adminConnection()) {
        final Set<Long> txIds = new HashSet<>();
        Awaitility.await().atMost(TestHelper.waitTimeForRecords() * 5, TimeUnit.SECONDS).pollInterval(100, TimeUnit.MILLISECONDS).until(() -> {
            admin.query(
                    "SELECT (SELECT transaction_id FROM sys.dm_tran_session_transactions AS t WHERE s.session_id=t.session_id) FROM sys.dm_exec_sessions AS s WHERE program_name='"
                            + appId + "'",
                    rs -> {
                        rs.next();
                        txIds.add(rs.getLong(1));
                    });
            // More than two distinct transaction ids proves transactions are rotated.
            return txIds.size() > 2;
        });
    }

    stopConnector();
}
/**
 * Verifies that the source {@code ts_ms} timestamp is epoch-based (UTC) and therefore
 * unaffected by the JVM default time zone, by running the connector under a shifted zone.
 */
@Test
@FixFor("DBZ-1643")
public void timestampAndTimezone() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final TimeZone currentTimeZone = TimeZone.getDefault();
    try {
        // Force a non-UTC default zone for the duration of the test.
        TimeZone.setDefault(TimeZone.getTimeZone("Australia/Canberra"));
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .build();

        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();

        // Wait for snapshot completion
        consumeRecordsByTopic(1);

        // Accept a +/- five minute window around "now" to absorb clock skew and latency.
        final Instant now = Instant.now();
        final Instant lowerBound = now.minusSeconds(5 * 60);
        final Instant upperBound = now.plusSeconds(5 * 60);

        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }

        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);

            final long timestamp = ((Struct) recordA.value()).getStruct("source").getInt64("ts_ms");
            final Instant instant = Instant.ofEpochMilli(timestamp);
            Assertions.assertThat(instant.isAfter(lowerBound) && instant.isBefore(upperBound)).isTrue();
        }
        stopConnector();
    }
    finally {
        // Always restore the JVM default zone, even on failure.
        TimeZone.setDefault(currentTimeZone);
    }
}
/**
 * Verifies that with {@code TOMBSTONES_ON_DELETE=false} each deleted row produces exactly
 * one delete event (before populated, after null) and no tombstone record.
 */
@Test
public void deleteWithoutTombstone() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TOMBSTONES_ON_DELETE, false)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);

    connection.execute("DELETE FROM tableB");
    // Only RECORDS_PER_TABLE events expected: no tombstones are emitted.
    final SourceRecords deleteRecords = consumeRecordsByTopic(RECORDS_PER_TABLE);
    final List<SourceRecord> deleteTableA = deleteRecords.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> deleteTableB = deleteRecords.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(deleteTableA).isNullOrEmpty();
    Assertions.assertThat(deleteTableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord deleteRecord = deleteTableB.get(i);
        final List<SchemaAndValueField> expectedDeleteRow = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct deleteKey = (Struct) deleteRecord.key();
        final Struct deleteValue = (Struct) deleteRecord.value();
        assertRecord((Struct) deleteValue.get("before"), expectedDeleteRow);
        assertNull(deleteValue.get("after"));
    }

    stopConnector();
}
/**
 * Verifies update events: rows inserted in one transaction, then bulk-updated; the first
 * batch of records are creates and the second batch carry both before and after images.
 */
@Test
public void update() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Testing.Print.enable();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    // Insert all rows inside a single transaction (auto-commit off), then update them all.
    connection.setAutoCommit(false);
    final String[] tableBInserts = new String[RECORDS_PER_TABLE];
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        tableBInserts[i] = "INSERT INTO tableb VALUES(" + id + ", 'b')";
    }
    connection.execute(tableBInserts);
    connection.setAutoCommit(true);

    connection.execute("UPDATE tableb SET colb='z'");

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * 2);
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE * 2);
    // First half: create events for the inserts.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct keyB = (Struct) recordB.key();
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
    // Second half: update events with both before ('b') and after ('z') images.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final SourceRecord recordB = tableB.get(i + RECORDS_PER_TABLE);
        final List<SchemaAndValueField> expectedBefore = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
        final List<SchemaAndValueField> expectedAfter = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "z"));

        final Struct keyB = (Struct) recordB.key();
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("before"), expectedBefore);
        assertRecord((Struct) valueB.get("after"), expectedAfter);
    }

    stopConnector();
}
/**
 * Verifies how a primary-key update is represented in the change stream: for
 * each table the test expects three records - a delete of the old key, a
 * tombstone for that key, and an insert with the new key. On tableb the
 * {@code event_serial_no} source field is additionally asserted (1 for the
 * delete half, 2 for the insert half of the PK update).
 */
@Test
public void updatePrimaryKey() throws Exception {
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Testing.Print.enable();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    connection.execute("INSERT INTO tableb VALUES(1, 'b')");
    consumeRecordsByTopic(1);

    // Change the primary key of one row per table in a single transaction.
    connection.setAutoCommit(false);
    connection.execute(
            "UPDATE tablea SET id=100 WHERE id=1",
            "UPDATE tableb SET id=100 WHERE id=1");

    // 3 records per table: delete + tombstone + insert.
    final SourceRecords records = consumeRecordsByTopic(6);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(3);
    Assertions.assertThat(tableB).hasSize(3);

    // tablea: delete of id=1, tombstone for id=1, insert of id=100.
    final List<SchemaAndValueField> expectedDeleteRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedDeleteKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedInsertKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));

    final SourceRecord deleteRecordA = tableA.get(0);
    final SourceRecord tombstoneRecordA = tableA.get(1);
    final SourceRecord insertRecordA = tableA.get(2);

    final Struct deleteKeyA = (Struct) deleteRecordA.key();
    final Struct deleteValueA = (Struct) deleteRecordA.value();
    assertRecord(deleteValueA.getStruct("before"), expectedDeleteRowA);
    assertRecord(deleteKeyA, expectedDeleteKeyA);
    assertNull(deleteValueA.get("after"));

    // Tombstone: old key with a null value.
    final Struct tombstoneKeyA = (Struct) tombstoneRecordA.key();
    final Struct tombstoneValueA = (Struct) tombstoneRecordA.value();
    assertRecord(tombstoneKeyA, expectedDeleteKeyA);
    assertNull(tombstoneValueA);

    final Struct insertKeyA = (Struct) insertRecordA.key();
    final Struct insertValueA = (Struct) insertRecordA.value();
    assertRecord(insertValueA.getStruct("after"), expectedInsertRowA);
    assertRecord(insertKeyA, expectedInsertKeyA);
    assertNull(insertValueA.get("before"));

    // tableb: same delete/tombstone/insert sequence, plus event_serial_no.
    final List<SchemaAndValueField> expectedDeleteRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedDeleteKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedInsertKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));

    final SourceRecord deleteRecordB = tableB.get(0);
    final SourceRecord tombstoneRecordB = tableB.get(1);
    final SourceRecord insertRecordB = tableB.get(2);

    final Struct deletekeyB = (Struct) deleteRecordB.key();
    final Struct deleteValueB = (Struct) deleteRecordB.value();
    assertRecord(deleteValueB.getStruct("before"), expectedDeleteRowB);
    assertRecord(deletekeyB, expectedDeleteKeyB);
    assertNull(deleteValueB.get("after"));
    // First half of the PK update carries event_serial_no = 1.
    assertThat(deleteValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(1L);

    final Struct tombstonekeyB = (Struct) tombstoneRecordB.key();
    final Struct tombstoneValueB = (Struct) tombstoneRecordB.value();
    assertRecord(tombstonekeyB, expectedDeleteKeyB);
    assertNull(tombstoneValueB);

    final Struct insertkeyB = (Struct) insertRecordB.key();
    final Struct insertValueB = (Struct) insertRecordB.value();
    assertRecord(insertValueB.getStruct("after"), expectedInsertRowB);
    assertRecord(insertkeyB, expectedInsertKeyB);
    assertNull(insertValueB.get("before"));
    // Second half of the PK update carries event_serial_no = 2.
    assertThat(insertValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(2L);

    stopConnector();
}
/**
 * Same primary-key-update scenario as {@code updatePrimaryKey()}, but the
 * connector is forcibly stopped in the middle of emitting the resulting
 * events (DBZ-1152). After restart, the remaining events must be delivered
 * exactly once so the combined stream still contains the full
 * delete/tombstone/insert sequence for both tables.
 */
@Test
@FixFor("DBZ-1152")
public void updatePrimaryKeyWithRestartInMiddle() throws Exception {
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    // Stop the connector right when the insert (op = "c") with the new key
    // id=100 is produced - i.e. mid-way through the PK-update event sequence.
    start(SqlServerConnector.class, config, record -> {
        final Struct envelope = (Struct) record.value();
        return envelope != null && "c".equals(envelope.get("op")) && (envelope.getStruct("after").getInt32("id") == 100);
    });
    assertConnectorIsRunning();
    // Testing.Print.enable();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    connection.execute("INSERT INTO tableb VALUES(1, 'b')");
    consumeRecordsByTopic(1);

    connection.setAutoCommit(false);
    connection.execute(
            "UPDATE tablea SET id=100 WHERE id=1",
            "UPDATE tableb SET id=100 WHERE id=1");

    // Only the first two records arrive before the stop condition fires.
    final SourceRecords records1 = consumeRecordsByTopic(2);
    stopConnector();

    // Restart and consume the remaining four records.
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    final SourceRecords records2 = consumeRecordsByTopic(4);

    // Stitch the two batches back together per table.
    final List<SourceRecord> tableA = records1.recordsForTopic("server1.dbo.tablea");
    tableA.addAll(records2.recordsForTopic("server1.dbo.tablea"));
    final List<SourceRecord> tableB = records2.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(3);
    Assertions.assertThat(tableB).hasSize(3);

    final List<SchemaAndValueField> expectedDeleteRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedDeleteKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedInsertKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));

    // tablea: delete + tombstone + insert, unaffected by the restart.
    final SourceRecord deleteRecordA = tableA.get(0);
    final SourceRecord tombstoneRecordA = tableA.get(1);
    final SourceRecord insertRecordA = tableA.get(2);

    final Struct deleteKeyA = (Struct) deleteRecordA.key();
    final Struct deleteValueA = (Struct) deleteRecordA.value();
    assertRecord(deleteValueA.getStruct("before"), expectedDeleteRowA);
    assertRecord(deleteKeyA, expectedDeleteKeyA);
    assertNull(deleteValueA.get("after"));

    final Struct tombstoneKeyA = (Struct) tombstoneRecordA.key();
    final Struct tombstoneValueA = (Struct) tombstoneRecordA.value();
    assertRecord(tombstoneKeyA, expectedDeleteKeyA);
    assertNull(tombstoneValueA);

    final Struct insertKeyA = (Struct) insertRecordA.key();
    final Struct insertValueA = (Struct) insertRecordA.value();
    assertRecord(insertValueA.getStruct("after"), expectedInsertRowA);
    assertRecord(insertKeyA, expectedInsertKeyA);
    assertNull(insertValueA.get("before"));

    final List<SchemaAndValueField> expectedDeleteRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedDeleteKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedInsertKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));

    // tableb: whole sequence arrives after the restart.
    final SourceRecord deleteRecordB = tableB.get(0);
    final SourceRecord tombstoneRecordB = tableB.get(1);
    final SourceRecord insertRecordB = tableB.get(2);

    final Struct deletekeyB = (Struct) deleteRecordB.key();
    final Struct deleteValueB = (Struct) deleteRecordB.value();
    assertRecord(deleteValueB.getStruct("before"), expectedDeleteRowB);
    assertRecord(deletekeyB, expectedDeleteKeyB);
    assertNull(deleteValueB.get("after"));

    final Struct tombstonekeyB = (Struct) tombstoneRecordB.key();
    final Struct tombstoneValueB = (Struct) tombstoneRecordB.value();
    assertRecord(tombstonekeyB, expectedDeleteKeyB);
    assertNull(tombstoneValueB);

    final Struct insertkeyB = (Struct) insertRecordB.key();
    final Struct insertValueB = (Struct) insertRecordB.value();
    assertRecord(insertValueB.getStruct("after"), expectedInsertRowB);
    assertRecord(insertkeyB, expectedInsertKeyB);
    assertNull(insertValueB.get("before"));

    stopConnector();
}
/**
 * Regression test for DBZ-2329: the primary key of every row is updated
 * twice, and the connector is stopped mid-transaction (triggered by the
 * delete half of the row with id 305). With small queue/batch sizes and
 * tombstones disabled, the restart must not lose or duplicate events - the
 * two batches together must contain exactly 20 records
 * (2 updates x 5 rows x 2 events per PK update).
 */
@Test
@FixFor("DBZ-2329")
public void updatePrimaryKeyTwiceWithRestartInMiddleOfTx() throws Exception {
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.MAX_QUEUE_SIZE, 2)
            .with(SqlServerConnectorConfig.MAX_BATCH_SIZE, 1)
            .with(SqlServerConnectorConfig.TOMBSTONES_ON_DELETE, false)
            .build();

    // Testing.Print.enable();
    // Wait for snapshot completion
    // Stop the connector on the delete event (op = "d") whose old id is 305.
    start(SqlServerConnector.class, config, record -> {
        final Struct envelope = (Struct) record.value();
        boolean stop = envelope != null && "d".equals(envelope.get("op")) && (envelope.getStruct("before").getInt32("id") == 305);
        return stop;
    });
    assertConnectorIsRunning();

    consumeRecordsByTopic(1);

    connection.setAutoCommit(false);
    connection.execute("INSERT INTO tableb (id, colb) values (1,'1')");
    connection.execute("INSERT INTO tableb (id, colb) values (2,'2')");
    connection.execute("INSERT INTO tableb (id, colb) values (3,'3')");
    connection.execute("INSERT INTO tableb (id, colb) values (4,'4')");
    connection.execute("INSERT INTO tableb (id, colb) values (5,'5')");
    consumeRecordsByTopic(5);

    // Two identical PK updates; the second sets each id to the value it
    // already has but is still expected to produce change events.
    connection.execute("UPDATE tableb set id = colb + 300");
    connection.execute("UPDATE tableb set id = colb + 300");

    // 14 records arrive before the stop condition fires.
    final SourceRecords records1 = consumeRecordsByTopic(14);

    stopConnector();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // The remaining 6 records arrive after the restart.
    final SourceRecords records2 = consumeRecordsByTopic(6);

    final List<SourceRecord> tableB = records1.recordsForTopic("server1.dbo.tableb");
    tableB.addAll(records2.recordsForTopic("server1.dbo.tableb"));

    Assertions.assertThat(tableB).hasSize(20);

    stopConnector();
}
/**
 * Verifies that changes made while the connector is stopped are streamed
 * after it is restarted: rows inserted during the downtime (ids starting at
 * ID_RESTART) must all be delivered once the connector comes back up.
 */
@Test
public void streamChangesWhileStopped() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final int ID_RESTART = 100;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // First batch of inserts while the connector is running; drain them.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);

    stopConnector();

    // Second batch of inserts while the connector is down.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_RESTART + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // After restart, the downtime inserts must be streamed.
    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE)
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = i + ID_RESTART;
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));

        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
}
/**
 * Regression test for DBZ-1069: verifies the offsets recorded by the
 * connector. Snapshot records must carry {@code snapshot=true} with
 * {@code snapshot_completed} flipping to {@code true} only on the last
 * record; streaming records (after a restart) must carry no snapshot flags
 * and a non-null {@code change_lsn}.
 */
@Test
@FixFor("DBZ-1069")
public void verifyOffsets() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final int ID_RESTART = 100;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();

    // Insert rows BEFORE starting the connector so they are snapshotted.
    final List<Integer> expectedIds = new ArrayList<>();
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
        expectedIds.add(id);
    }

    final String tableaCT = connection.getNameOfChangeTable("tablea");
    final String tablebCT = connection.getNameOfChangeTable("tableb");

    TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == expectedIds.get(expectedIds.size() - 1));

    // Poll until the CDC capture instances for both tables contain all of
    // the pre-snapshot inserts, so the snapshot/streaming handoff is stable.
    Awaitility.await().atMost(30, TimeUnit.SECONDS).until(() -> {
        // Wait for max lsn to be available
        if (!connection.getMaxLsn().isAvailable()) {
            return false;
        }

        // verify pre-snapshot inserts have succeeded
        Map<String, Boolean> resultMap = new HashMap<>();
        connection.listOfChangeTables().forEach(ct -> {
            final String tableName = ct.getChangeTableId().table();
            if (tableName.endsWith("dbo_" + tableaCT) || tableName.endsWith("dbo_" + tablebCT)) {
                try {
                    final Lsn minLsn = connection.getMinLsn(tableName);
                    final Lsn maxLsn = connection.getMaxLsn();
                    SqlServerChangeTable[] tables = Collections.singletonList(ct).toArray(new SqlServerChangeTable[]{});
                    final List<Integer> ids = new ArrayList<>();
                    connection.getChangesForTables(tables, minLsn, maxLsn, resultsets -> {
                        final ResultSet rs = resultsets[0];
                        while (rs.next()) {
                            ids.add(rs.getInt("id"));
                        }
                    });
                    if (ids.equals(expectedIds)) {
                        resultMap.put(tableName, true);
                    }
                    else {
                        resultMap.put(tableName, false);
                    }
                }
                catch (Exception e) {
                    org.junit.Assert.fail("Failed to fetch changes for table " + tableName + ": " + e.getMessage());
                }
            }
        });
        // All inspected change tables must have matched expectedIds.
        return resultMap.values().stream().filter(v -> !v).count() == 0;
    });

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Skip the schema/first record, then check snapshot offsets on the rest.
    List<SourceRecord> records = consumeRecordsByTopic(1 + RECORDS_PER_TABLE * TABLES).allRecordsInOrder();
    records = records.subList(1, records.size());
    for (Iterator<SourceRecord> it = records.iterator(); it.hasNext();) {
        SourceRecord record = it.next();
        assertThat(record.sourceOffset().get("snapshot")).as("Snapshot phase").isEqualTo(true);
        if (it.hasNext()) {
            assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot in progress").isEqualTo(false);
        }
        else {
            // Only the very last snapshot record marks the snapshot complete.
            assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot completed").isEqualTo(true);
        }
    }

    stopConnector();

    // Insert more rows while stopped; these will arrive via streaming.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_RESTART + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    final SourceRecords sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");

    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);

    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = i + ID_RESTART;
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));

        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));

        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));

        // Streaming offsets: no snapshot flags, but a change LSN is present.
        assertThat(recordA.sourceOffset().get("snapshot")).as("Streaming phase").isNull();
        assertThat(recordA.sourceOffset().get("snapshot_completed")).as("Streaming phase").isNull();
        assertThat(recordA.sourceOffset().get("change_lsn")).as("LSN present").isNotNull();

        assertThat(recordB.sourceOffset().get("snapshot")).as("Streaming phase").isNull();
        assertThat(recordB.sourceOffset().get("snapshot_completed")).as("Streaming phase").isNull();
        assertThat(recordB.sourceOffset().get("change_lsn")).as("LSN present").isNotNull();
    }
}
@Test
// With TABLE_WHITELIST limited to dbo.tableb, only tableb changes may reach
// the change stream; tablea inserts must produce no records at all.
public void testWhitelistTable() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 1;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.TABLE_WHITELIST, "dbo.tableb")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // Insert into both tables; only tableb is whitelisted.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).isNullOrEmpty();
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);

    stopConnector();
}
@Test
// Same scenario as testWhitelistTable but using the newer
// TABLE_INCLUDE_LIST option: only dbo.tableb changes are captured.
public void testTableIncludeList() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 1;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.tableb")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // Insert into both tables; only tableb is on the include list.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).isNullOrEmpty();
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);

    stopConnector();
}
@Test
// With dbo.tablea on the TABLE_BLACKLIST, its inserts must not be captured
// while tableb inserts continue to flow.
public void testBlacklistTable() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 1;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_BLACKLIST, "dbo.tablea")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // Insert into both tables; tablea is blacklisted.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).isNullOrEmpty();
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);

    stopConnector();
}
@Test
// Same scenario as testBlacklistTable but using the newer
// TABLE_EXCLUDE_LIST option: dbo.tablea changes are filtered out.
public void testTableExcludeList() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 1;
    final int ID_START = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_EXCLUDE_LIST, "dbo.tablea")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // Insert into both tables; tablea is on the exclude list.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }

    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).isNullOrEmpty();
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);

    stopConnector();
}
/**
 * Regression test for DBZ-1617: a column is added AFTER CDC is enabled for
 * the table, so the CDC capture instance's column set no longer matches the
 * table's current columns. With that added column excluded via
 * COLUMN_EXCLUDE_LIST, inserts (which supply all four columns) must still
 * produce events containing only the original three columns.
 */
@Test
@FixFor("DBZ-1617")
public void blacklistColumnWhenCdcColumnsDoNotMatchWithOriginalSnapshot() throws Exception {
    connection.execute("CREATE TABLE table_a (id int, name varchar(30), amount integer primary key(id))");
    TestHelper.enableTableCdc(connection, "table_a");
    // Column added after CDC enablement - not part of the capture instance.
    connection.execute("ALTER TABLE table_a ADD blacklisted_column varchar(30)");

    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.table_a.blacklisted_column")
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    // Insert supplies a value for the excluded column as well.
    connection.execute("INSERT INTO table_a VALUES(10, 'some_name', 120, 'some_string')");

    final SourceRecords records = consumeRecordsByTopic(1);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.table_a");

    // Expected value schema: the excluded column must be absent.
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .field("amount", Schema.OPTIONAL_INT32_SCHEMA)
            .build();
    Struct expectedValueA = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "some_name")
            .put("amount", 120);

    Assertions.assertThat(tableA).hasSize(1);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldIsEqualTo(expectedValueA)
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA);

    stopConnector();
}
@Test
@FixFor("DBZ-1067")
// Two identical tables, but only table_a has a column on the COLUMN_BLACKLIST:
// its events must omit "amount" while table_b events keep all three columns.
public void testBlacklistColumn() throws Exception {
    connection.execute(
            "CREATE TABLE blacklist_column_table_a (id int, name varchar(30), amount integer primary key(id))",
            "CREATE TABLE blacklist_column_table_b (id int, name varchar(30), amount integer primary key(id))");
    TestHelper.enableTableCdc(connection, "blacklist_column_table_a");
    TestHelper.enableTableCdc(connection, "blacklist_column_table_b");

    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_BLACKLIST, "dbo.blacklist_column_table_a.amount")
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    connection.execute("INSERT INTO blacklist_column_table_a VALUES(10, 'some_name', 120)");
    connection.execute("INSERT INTO blacklist_column_table_b VALUES(11, 'some_name', 447)");
    final SourceRecords sourceRecords = consumeRecordsByTopic(2);
    final List<SourceRecord> recordsA = sourceRecords.recordsForTopic("server1.dbo.blacklist_column_table_a");
    final List<SourceRecord> recordsB = sourceRecords.recordsForTopic("server1.dbo.blacklist_column_table_b");

    // table_a: the blacklisted "amount" column must be absent.
    final Schema schemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.blacklist_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    final Struct valueA = new Struct(schemaA)
            .put("id", 10)
            .put("name", "some_name");
    Assertions.assertThat(recordsA).hasSize(1);
    SourceRecordAssert.assertThat(recordsA.get(0))
            .valueAfterFieldIsEqualTo(valueA)
            .valueAfterFieldSchemaIsEqualTo(schemaA);

    // table_b: no blacklist entry, so "amount" is still present.
    final Schema schemaB = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.blacklist_column_table_b.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .field("amount", Schema.OPTIONAL_INT32_SCHEMA)
            .build();
    final Struct valueB = new Struct(schemaB)
            .put("id", 11)
            .put("name", "some_name")
            .put("amount", 447);
    Assertions.assertThat(recordsB).hasSize(1);
    SourceRecordAssert.assertThat(recordsB.get(0))
            .valueAfterFieldIsEqualTo(valueB)
            .valueAfterFieldSchemaIsEqualTo(schemaB);

    stopConnector();
}
/**
 * Same scenario as {@code testBlacklistColumn()} but using the newer
 * COLUMN_EXCLUDE_LIST option (DBZ-1067): only table_a's "amount" column is
 * excluded; table_b must still emit all three columns.
 */
@Test
@FixFor("DBZ-1067")
public void testColumnExcludeList() throws Exception {
    connection.execute(
            "CREATE TABLE blacklist_column_table_a (id int, name varchar(30), amount integer primary key(id))",
            "CREATE TABLE blacklist_column_table_b (id int, name varchar(30), amount integer primary key(id))");
    TestHelper.enableTableCdc(connection, "blacklist_column_table_a");
    TestHelper.enableTableCdc(connection, "blacklist_column_table_b");

    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.blacklist_column_table_a.amount")
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    connection.execute("INSERT INTO blacklist_column_table_a VALUES(10, 'some_name', 120)");
    connection.execute("INSERT INTO blacklist_column_table_b VALUES(11, 'some_name', 447)");

    final SourceRecords records = consumeRecordsByTopic(2);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.blacklist_column_table_a");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.blacklist_column_table_b");

    // table_a: excluded "amount" column must not appear in the schema.
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.blacklist_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueA = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "some_name");

    // table_b: untouched, keeps all columns including "amount".
    Schema expectedSchemaB = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.blacklist_column_table_b.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .field("amount", Schema.OPTIONAL_INT32_SCHEMA)
            .build();
    Struct expectedValueB = new Struct(expectedSchemaB)
            .put("id", 11)
            .put("name", "some_name")
            .put("amount", 447);

    Assertions.assertThat(tableA).hasSize(1);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldIsEqualTo(expectedValueA)
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA);

    Assertions.assertThat(tableB).hasSize(1);
    SourceRecordAssert.assertThat(tableB.get(0))
            .valueAfterFieldIsEqualTo(expectedValueB)
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaB);

    stopConnector();
}
/**
 * Verifies COLUMN_INCLUDE_LIST (DBZ-2522): id and name are included for all
 * tables, but "amount" only for include_list_column_table_b. Events from
 * table_a must therefore omit "amount" while table_b events retain it.
 */
@Test
@FixFor("DBZ-2522")
public void testColumnIncludeList() throws Exception {
    connection.execute(
            "CREATE TABLE include_list_column_table_a (id int, name varchar(30), amount integer primary key(id))",
            "CREATE TABLE include_list_column_table_b (id int, name varchar(30), amount integer primary key(id))");
    TestHelper.enableTableCdc(connection, "include_list_column_table_a");
    TestHelper.enableTableCdc(connection, "include_list_column_table_b");

    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, ".*id,.*name,dbo.include_list_column_table_b.amount")
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 'some_name', 120)");
    connection.execute("INSERT INTO include_list_column_table_b VALUES(11, 'some_name', 447)");

    final SourceRecords records = consumeRecordsByTopic(2);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.include_list_column_table_a");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.include_list_column_table_b");

    // table_a: "amount" is not on the include list, so it must be absent.
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.include_list_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueA = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "some_name");

    // table_b: "amount" is explicitly included.
    Schema expectedSchemaB = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.include_list_column_table_b.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .field("amount", Schema.OPTIONAL_INT32_SCHEMA)
            .build();
    Struct expectedValueB = new Struct(expectedSchemaB)
            .put("id", 11)
            .put("name", "some_name")
            .put("amount", 447);

    Assertions.assertThat(tableA).hasSize(1);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldIsEqualTo(expectedValueA)
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA);

    Assertions.assertThat(tableB).hasSize(1);
    SourceRecordAssert.assertThat(tableB.get(0))
            .valueAfterFieldIsEqualTo(expectedValueB)
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaB);

    stopConnector();
}
/**
 * Verifies the column.mask.hash.* SMT-style masking (DBZ-1692): the "name"
 * column of both tables is hashed with SHA-256 and the given salt. The
 * varchar(20) column in table_b shows the hash truncated to the column
 * length (20 chars) while table_a keeps the full 64-char hash.
 */
@Test
@FixFor("DBZ-1692")
public void shouldConsumeEventsWithMaskedHashedColumns() throws Exception {
    connection.execute(
            "CREATE TABLE masked_hashed_column_table_a (id int, name varchar(255) primary key(id))",
            "CREATE TABLE masked_hashed_column_table_b (id int, name varchar(20), primary key(id))");
    TestHelper.enableTableCdc(connection, "masked_hashed_column_table_a");
    TestHelper.enableTableCdc(connection, "masked_hashed_column_table_b");

    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with("column.mask.hash.SHA-256.with.salt.CzQMA0cB5K", "testDB.dbo.masked_hashed_column_table_a.name, testDB.dbo.masked_hashed_column_table_b.name")
            .build();

    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();

    // Wait for snapshot completion
    consumeRecordsByTopic(1);

    connection.execute("INSERT INTO masked_hashed_column_table_a VALUES(10, 'some_name')");
    connection.execute("INSERT INTO masked_hashed_column_table_b VALUES(11, 'some_name')");

    final SourceRecords records = consumeRecordsByTopic(2);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.masked_hashed_column_table_a");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.masked_hashed_column_table_b");

    assertThat(tableA).hasSize(1);
    SourceRecord record = tableA.get(0);
    VerifyRecord.isValidInsert(record, "id", 10);

    // NOTE(review): the hash assertion is guarded by a null check, so it is
    // silently skipped if "after" is null - confirm this is intentional.
    Struct value = (Struct) record.value();
    if (value.getStruct("after") != null) {
        assertThat(value.getStruct("after").getString("name")).isEqualTo("3b225d0696535d66f2c0fb2e36b012c520d396af3dd8f18330b9c9cd23ca714e");
    }

    assertThat(tableB).hasSize(1);
    record = tableB.get(0);
    VerifyRecord.isValidInsert(record, "id", 11);

    // Truncated to the varchar(20) column width.
    value = (Struct) record.value();
    if (value.getStruct("after") != null) {
        assertThat(value.getStruct("after").getString("name")).isEqualTo("3b225d0696535d66f2c0");
    }

    stopConnector();
}
/**
 * Column values must be replaced by a fixed number of asterisks (column.mask.with)
 * or truncated to a fixed length (column.truncate.to) as configured (DBZ-1972).
 */
@Test
@FixFor("DBZ-1972")
public void shouldConsumeEventsWithMaskedAndTruncatedColumns() throws Exception {
    connection.execute(
            "CREATE TABLE masked_hashed_column_table (id int, name varchar(255) primary key(id))",
            "CREATE TABLE truncated_column_table (id int, name varchar(20), primary key(id))");
    TestHelper.enableTableCdc(connection, "masked_hashed_column_table");
    TestHelper.enableTableCdc(connection, "truncated_column_table");
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with("column.mask.with.12.chars", "testDB.dbo.masked_hashed_column_table.name")
            .with("column.truncate.to.4.chars", "testDB.dbo.truncated_column_table.name")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Discard the single record produced while the snapshot completes
    consumeRecordsByTopic(1);
    connection.execute("INSERT INTO masked_hashed_column_table VALUES(10, 'some_name')");
    connection.execute("INSERT INTO truncated_column_table VALUES(11, 'some_name')");
    final SourceRecords sourceRecords = consumeRecordsByTopic(2);
    final List<SourceRecord> maskedRecords = sourceRecords.recordsForTopic("server1.dbo.masked_hashed_column_table");
    final List<SourceRecord> truncatedRecords = sourceRecords.recordsForTopic("server1.dbo.truncated_column_table");
    assertThat(maskedRecords).hasSize(1);
    SourceRecord maskedInsert = maskedRecords.get(0);
    VerifyRecord.isValidInsert(maskedInsert, "id", 10);
    Struct maskedPayload = (Struct) maskedInsert.value();
    if (maskedPayload.getStruct("after") != null) {
        // Masked to exactly 12 asterisks
        assertThat(maskedPayload.getStruct("after").getString("name")).isEqualTo("************");
    }
    assertThat(truncatedRecords).hasSize(1);
    SourceRecord truncatedInsert = truncatedRecords.get(0);
    VerifyRecord.isValidInsert(truncatedInsert, "id", 11);
    Struct truncatedPayload = (Struct) truncatedInsert.value();
    if (truncatedPayload.getStruct("after") != null) {
        // Truncated to the first 4 characters of 'some_name'
        assertThat(truncatedPayload.getStruct("after").getString("name")).isEqualTo("some");
    }
    stopConnector();
}
/**
 * A column that the CDC capture instance does not cover ("amount") must be absent
 * from both the snapshot record and the streamed record (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void whenCaptureInstanceExcludesColumnsExpectSnapshotAndStreamingToExcludeColumns() throws Exception {
    connection.execute(
            "CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
    connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
    // Capture instance only covers "id" and "name"; "amount" is never captured
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
            Arrays.asList("id", "name"));
    final Configuration config = TestHelper.defaultConfig()
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120)");
    // 3 records consumed overall; 2 of them belong to this topic (snapshot + streamed)
    final SourceRecords records = consumeRecordsByTopic(3);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
    // The value schema must not contain the uncaptured "amount" field
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.excluded_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueSnapshot = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "a name");
    Struct expectedValueStreaming = new Struct(expectedSchemaA)
            .put("id", 11)
            .put("name", "some_name");
    Assertions.assertThat(tableA).hasSize(2);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
            .valueAfterFieldIsEqualTo(expectedValueSnapshot);
    SourceRecordAssert.assertThat(tableA.get(1))
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
            .valueAfterFieldIsEqualTo(expectedValueStreaming);
    stopConnector();
}
/**
 * When a table has two capture instances (the second created after an ALTER TABLE),
 * the emitted schema must reflect the newer capture instance, including the added
 * "note" column, while still omitting the never-captured "amount" column (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void whenMultipleCaptureInstancesExcludesColumnsExpectLatestCDCTableUtilized() throws Exception {
    connection.execute(
            "CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
    connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
    // First capture instance: id + name only
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
            Arrays.asList("id", "name"));
    connection.execute("ALTER TABLE excluded_column_table_a ADD note varchar(30)");
    // Second (newer) capture instance additionally covers the new "note" column
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a_2",
            Arrays.asList("id", "name", "note"));
    final Configuration config = TestHelper.defaultConfig()
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120, 'a note')");
    // 3 records consumed overall; 2 of them belong to this topic (snapshot + streamed)
    final SourceRecords records = consumeRecordsByTopic(3);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
    // Schema from the newer capture instance: has "note", still omits "amount"
    Schema expectedSchema = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.excluded_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .field("note", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    // Snapshot row predates the ALTER TABLE, so its "note" is null
    Struct expectedValueSnapshot = new Struct(expectedSchema)
            .put("id", 10)
            .put("name", "a name")
            .put("note", null);
    Struct expectedValueStreaming = new Struct(expectedSchema)
            .put("id", 11)
            .put("name", "some_name")
            .put("note", "a note");
    Assertions.assertThat(tableA).hasSize(2);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldSchemaIsEqualTo(expectedSchema)
            .valueAfterFieldIsEqualTo(expectedValueSnapshot);
    SourceRecordAssert.assertThat(tableA.get(1))
            .valueAfterFieldSchemaIsEqualTo(expectedSchema)
            .valueAfterFieldIsEqualTo(expectedValueStreaming);
    stopConnector();
}
/**
 * Renaming a captured column and re-creating the capture instance under the same name
 * must not break streaming: the pre-rename snapshot record keeps the old column name
 * and the post-rename streamed record carries the new one (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
@Ignore // the test is very flaky in CI environment
public void whenCaptureInstanceExcludesColumnsAndColumnsRenamedExpectNoErrors() throws Exception {
    connection.execute(
            "CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
    connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
    // Capture instance covers id + name; "amount" is never captured
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
            Arrays.asList("id", "name"));
    final Configuration config = TestHelper.defaultConfig()
            .with(RelationalDatabaseConnectorConfig.TABLE_INCLUDE_LIST, ".*excluded_column_table_a")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForStreamingRunning("sql_server", "server1");
    // Rename the column: CDC must be disabled first, then re-enabled with the new column list
    TestHelper.disableTableCdc(connection, "excluded_column_table_a")
    connection.execute("EXEC sp_RENAME 'excluded_column_table_a.name', 'first_name', 'COLUMN'");
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
            Arrays.asList("id", "first_name"));
    connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120)");
    TestHelper.waitForCdcRecord(connection, "excluded_column_table_a", "dbo_excluded_column_table_a", rs -> rs.getInt("id") == 11);
    final SourceRecords records = consumeRecordsByTopic(2);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
    // Pre-rename schema: field is still called "name"
    Schema expectedSchema1 = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.excluded_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueSnapshot = new Struct(expectedSchema1)
            .put("id", 10)
            .put("name", "a name");
    // Post-rename schema: field is now "first_name"
    Schema expectedSchema2 = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.excluded_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("first_name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueStreaming = new Struct(expectedSchema2)
            .put("id", 11)
            .put("first_name", "some_name");
    Assertions.assertThat(tableA).hasSize(2);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldSchemaIsEqualTo(expectedSchema1)
            .valueAfterFieldIsEqualTo(expectedValueSnapshot);
    SourceRecordAssert.assertThat(tableA.get(1))
            .valueAfterFieldSchemaIsEqualTo(expectedSchema2)
            .valueAfterFieldIsEqualTo(expectedValueStreaming);
    stopConnector();
}
/**
 * A streamed record must omit the column that the CDC capture instance does not
 * cover (DBZ-1068).
 */
@Test
@FixFor("DBZ-1068")
public void excludeColumnWhenCaptureInstanceExcludesColumns() throws Exception {
    connection.execute(
            "CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
    // Capture instance covers id + name only; "amount" is never captured by CDC
    TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
            Arrays.asList("id", "name"));
    start(SqlServerConnector.class, TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .build());
    assertConnectorIsRunning();
    connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'some_name', 120)");
    final SourceRecords sourceRecords = consumeRecordsByTopic(1);
    final List<SourceRecord> topicRecords = sourceRecords.recordsForTopic("server1.dbo.excluded_column_table_a");
    final Schema valueSchema = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.excluded_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    final Struct expectedValue = new Struct(valueSchema)
            .put("id", 10)
            .put("name", "some_name");
    Assertions.assertThat(topicRecords).hasSize(1);
    SourceRecordAssert.assertThat(topicRecords.get(0))
            .valueAfterFieldSchemaIsEqualTo(valueSchema)
            .valueAfterFieldIsEqualTo(expectedValue);
    stopConnector();
}
/**
 * Snapshot and streaming must both omit an uncaptured column that sits in the middle
 * of the table definition, not only at the end (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void excludeColumnWhenCaptureInstanceExcludesColumnInMiddleOfTable() throws Exception {
    connection.execute(
            "CREATE TABLE exclude_list_column_table_a (id int, amount integer, name varchar(30), primary key(id))");
    connection.execute("INSERT INTO exclude_list_column_table_a VALUES(10, 100, 'a name')");
    // Capture instance covers id + name; the middle column "amount" is not captured
    TestHelper.enableTableCdc(connection, "exclude_list_column_table_a", "dbo_exclude_list_column_table_a",
            Arrays.asList("id", "name"));
    final Configuration config = TestHelper.defaultConfig()
            .with(RelationalDatabaseConnectorConfig.TABLE_INCLUDE_LIST, ".*exclude_list_column_table_a")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1")
    connection.execute("INSERT INTO exclude_list_column_table_a VALUES(11, 120, 'some_name')");
    TestHelper.waitForCdcRecord(connection, "exclude_list_column_table_a", rs -> rs.getInt("id") == 11);
    // One snapshot record + one streamed record
    final SourceRecords records = consumeRecordsByTopic(2);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.exclude_list_column_table_a");
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.exclude_list_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValue1 = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "a name");
    Struct expectedValue2 = new Struct(expectedSchemaA)
            .put("id", 11)
            .put("name", "some_name");
    Assertions.assertThat(tableA).hasSize(2);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
            .valueAfterFieldIsEqualTo(expectedValue1);
    SourceRecordAssert.assertThat(tableA.get(1))
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
            .valueAfterFieldIsEqualTo(expectedValue2);
    stopConnector();
}
/**
 * A column include list must compose correctly with a capture instance that already
 * skips a middle column of the table (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void includeColumnsWhenCaptureInstanceExcludesColumnInMiddleOfTable() throws Exception {
    connection.execute(
            "CREATE TABLE include_list_column_table_a (id int, amount integer, name varchar(30), primary key(id))");
    // Capture instance skips the middle column "amount"
    TestHelper.enableTableCdc(connection, "include_list_column_table_a", "dbo_include_list_column_table_a",
            Arrays.asList("id", "name"));
    start(SqlServerConnector.class, TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, "dbo.include_list_column_table_a.id,dbo.include_list_column_table_a.name")
            .build());
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 120, 'some_name')");
    TestHelper.waitForCdcRecord(connection, "include_list_column_table_a", rs -> rs.getInt("id") == 10);
    final SourceRecords sourceRecords = consumeRecordsByTopic(1);
    final List<SourceRecord> topicRecords = sourceRecords.recordsForTopic("server1.dbo.include_list_column_table_a");
    final Schema valueSchema = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.include_list_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    final Struct expectedValue = new Struct(valueSchema)
            .put("id", 10)
            .put("name", "some_name");
    Assertions.assertThat(topicRecords).hasSize(1);
    SourceRecordAssert.assertThat(topicRecords.get(0))
            .valueAfterFieldSchemaIsEqualTo(valueSchema)
            .valueAfterFieldIsEqualTo(expectedValue);
    stopConnector();
}
/**
 * The configured column exclude list must stack on top of the column already omitted
 * by the capture instance, leaving only id + name in the record (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void excludeMultipleColumnsWhenCaptureInstanceExcludesSingleColumn() throws Exception {
    connection.execute(
            "CREATE TABLE exclude_list_column_table_a (id int, amount integer, note varchar(30), name varchar(30), primary key(id))");
    // Capture instance skips "amount" but still covers "note"
    TestHelper.enableTableCdc(connection, "exclude_list_column_table_a", "dbo_exclude_list_column_table_a",
            Arrays.asList("id", "note", "name"));
    // Exclude the note column on top of the already excluded amount column
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.exclude_list_column_table_a.amount,dbo.exclude_list_column_table_a.note")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO exclude_list_column_table_a VALUES(10, 120, 'a note', 'some_name')");
    TestHelper.waitForCdcRecord(connection, "exclude_list_column_table_a", rs -> rs.getInt("id") == 10);
    final SourceRecords records = consumeRecordsByTopic(1);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.exclude_list_column_table_a");
    // Only id + name survive the combined exclusions
    Schema expectedSchemaA = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.exclude_list_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    Struct expectedValueA = new Struct(expectedSchemaA)
            .put("id", 10)
            .put("name", "some_name");
    Assertions.assertThat(tableA).hasSize(1);
    SourceRecordAssert.assertThat(tableA.get(0))
            .valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
            .valueAfterFieldIsEqualTo(expectedValueA);
    stopConnector();
}
/**
 * The configured column include list must further narrow the set of columns already
 * restricted by the capture instance, leaving only id + name (DBZ-2522).
 */
@Test
@FixFor("DBZ-2522")
public void includeMultipleColumnsWhenCaptureInstanceExcludesSingleColumn() throws Exception {
    connection.execute(
            "CREATE TABLE include_list_column_table_a (id int, amount integer, note varchar(30), name varchar(30), primary key(id))");
    // Capture instance skips "amount" but still covers "note"
    TestHelper.enableTableCdc(connection, "include_list_column_table_a", "dbo_include_list_column_table_a",
            Arrays.asList("id", "note", "name"));
    // The include list keeps only id + name, dropping "note" as well
    start(SqlServerConnector.class, TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, "dbo.include_list_column_table_a.id,dbo.include_list_column_table_a.name")
            .build());
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 120, 'a note', 'some_name')");
    TestHelper.waitForCdcRecord(connection, "include_list_column_table_a", rs -> rs.getInt("id") == 10);
    final SourceRecords sourceRecords = consumeRecordsByTopic(1);
    final List<SourceRecord> topicRecords = sourceRecords.recordsForTopic("server1.dbo.include_list_column_table_a");
    final Schema valueSchema = SchemaBuilder.struct()
            .optional()
            .name("server1.dbo.include_list_column_table_a.Value")
            .field("id", Schema.INT32_SCHEMA)
            .field("name", Schema.OPTIONAL_STRING_SCHEMA)
            .build();
    final Struct expectedValue = new Struct(valueSchema)
            .put("id", 10)
            .put("name", "some_name");
    Assertions.assertThat(topicRecords).hasSize(1);
    SourceRecordAssert.assertThat(topicRecords.get(0))
            .valueAfterFieldSchemaIsEqualTo(valueSchema)
            .valueAfterFieldIsEqualTo(expectedValue);
    stopConnector();
}
/**
 * Passing the "applicationName" property which can be asserted from the connected sessions.
 */
@Test
@FixFor("DBZ-964")
public void shouldPropagateDatabaseDriverProperties() throws Exception {
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with("database.applicationName", "Debezium App DBZ-964")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // consuming one record to make sure the connector establishes the DB connection which happens asynchronously
    // after the start() call
    connection.execute("INSERT INTO tablea VALUES(964, 'a')");
    consumeRecordsByTopic(1);
    // The pass-through driver property must be visible on at least one active session
    connection.query("select count(1) from sys.dm_exec_sessions where program_name = 'Debezium App DBZ-964'", rs -> {
        rs.next();
        assertThat(rs.getInt(1)).isGreaterThanOrEqualTo(1);
    });
    // Fix: shut the connector down explicitly, as every other test in this class does
    stopConnector();
}
/**
 * Exercises a connector restart while a multi-row transaction has been only partially
 * consumed, and verifies that streaming resumes mid-transaction without losing or
 * duplicating change events (DBZ-1128).
 *
 * @param restartJustAfterSnapshot when true, the connector is stopped right after the
 *            snapshot and one extra row is inserted before the monitored run begins
 * @param afterStreaming when true, one streamed change is produced and verified before
 *            the transaction that gets interrupted is written
 */
private void restartInTheMiddleOfTx(boolean restartJustAfterSnapshot, boolean afterStreaming) throws Exception {
    final int RECORDS_PER_TABLE = 30;
    final int TABLES = 2;
    final int ID_START = 10;
    final int ID_RESTART = 1000;
    // id at which the embedded engine is forced to stop — half-way through the transaction
    final int HALF_ID = ID_START + RECORDS_PER_TABLE / 2;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();
    if (restartJustAfterSnapshot) {
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot to be completed
        consumeRecordsByTopic(1);
        stopConnector();
        connection.execute("INSERT INTO tablea VALUES(-1, '-a')");
    }
    // Restart with a stop condition: the engine aborts exactly when the streamed tablea
    // record with id == HALF_ID arrives
    start(SqlServerConnector.class, config, record -> {
        if (!"server1.dbo.tablea.Envelope".equals(record.valueSchema().name())) {
            return false;
        }
        final Struct envelope = (Struct) record.value();
        final Struct after = envelope.getStruct("after");
        final Integer id = after.getInt32("id");
        final String value = after.getString("cola");
        return id != null && id == HALF_ID && "a".equals(value);
    });
    assertConnectorIsRunning();
    // Wait for snapshot to be completed or a first streaming message delivered
    consumeRecordsByTopic(1);
    if (afterStreaming) {
        // Produce and verify one streamed change before the interrupted transaction
        connection.execute("INSERT INTO tablea VALUES(-2, '-a')");
        final SourceRecords records = consumeRecordsByTopic(1);
        final List<SchemaAndValueField> expectedRow = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, -2),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "-a"));
        assertRecord(((Struct) records.allRecordsInOrder().get(0).value()).getStruct(Envelope.FieldName.AFTER), expectedRow);
    }
    // Write 2 * RECORDS_PER_TABLE rows in one single transaction
    connection.setAutoCommit(false);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.executeWithoutCommitting(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.executeWithoutCommitting(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    connection.connection().commit();
    TestHelper.waitForCdcRecord(connection, "tablea", rs -> rs.getInt("id") == (ID_START + RECORDS_PER_TABLE - 1));
    TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == (ID_START + RECORDS_PER_TABLE - 1));
    // Only the first half of the transaction is delivered before the forced stop;
    // the last delivered record is the tableb row just before HALF_ID
    List<SourceRecord> records = consumeRecordsByTopic(RECORDS_PER_TABLE).allRecordsInOrder();
    assertThat(records).hasSize(RECORDS_PER_TABLE);
    SourceRecord lastRecordForOffset = records.get(RECORDS_PER_TABLE - 1);
    Struct value = (Struct) lastRecordForOffset.value();
    final List<SchemaAndValueField> expectedLastRow = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, HALF_ID - 1),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    assertRecord((Struct) value.get("after"), expectedLastRow);
    stopConnector();
    // Restart: the remaining half of the interrupted transaction must now be delivered
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    SourceRecords sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE);
    records = sourceRecords.allRecordsInOrder();
    assertThat(records).hasSize(RECORDS_PER_TABLE);
    List<SourceRecord> tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
    List<SourceRecord> tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");
    for (int i = 0; i < RECORDS_PER_TABLE / 2; i++) {
        final int id = HALF_ID + i;
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
    // A second batch, committed per-iteration this time, must stream normally after the restart
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_RESTART + i;
        connection.executeWithoutCommitting(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.executeWithoutCommitting(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
        connection.connection().commit();
    }
    TestHelper.waitForCdcRecord(connection, "tablea", rs -> rs.getInt("id") == (ID_RESTART + RECORDS_PER_TABLE - 1));
    TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == (ID_RESTART + RECORDS_PER_TABLE - 1));
    sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
    tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = i + ID_RESTART;
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
}
/**
 * Restart-mid-transaction scenario: connector is stopped right after the snapshot
 * completes before the monitored run begins (DBZ-1128).
 */
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTxAfterSnapshot() throws Exception {
    restartInTheMiddleOfTx(true, false);
}
/**
 * Restart-mid-transaction scenario: one complete streamed transaction is verified
 * before the interrupted transaction is written (DBZ-1128).
 */
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTxAfterCompletedTx() throws Exception {
    restartInTheMiddleOfTx(false, true);
}
/**
 * Restart-mid-transaction scenario: base case, no extra restarts and no prior
 * streamed transaction (DBZ-1128).
 */
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTx() throws Exception {
    restartInTheMiddleOfTx(false, false);
}
/**
 * When the table include list filters out every table, the connector must log the
 * "no captured data collections" warning (DBZ-1242).
 */
@Test
@FixFor("DBZ-1242")
public void testEmptySchemaWarningAfterApplyingFilters() throws Exception {
    // Intercept log output so the warning can be asserted after shutdown
    final LogInterceptor logInterceptor = new LogInterceptor();
    start(SqlServerConnector.class, TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "my_products")
            .build());
    assertConnectorIsRunning();
    waitForAvailableRecords(100, TimeUnit.MILLISECONDS);
    stopConnector(value -> assertThat(logInterceptor.containsWarnMessage(DatabaseSchema.NO_CAPTURED_DATA_COLLECTIONS_WARNING)).isTrue());
}
/**
 * With no restrictive filters configured, the "no captured data collections" warning
 * must NOT be logged (DBZ-1242).
 */
@Test
@FixFor("DBZ-1242")
public void testNoEmptySchemaWarningAfterApplyingFilters() throws Exception {
    // Intercept log output so the absence of the warning can be asserted after shutdown
    final LogInterceptor logInterceptor = new LogInterceptor();
    start(SqlServerConnector.class, TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build());
    assertConnectorIsRunning();
    waitForAvailableRecords(100, TimeUnit.MILLISECONDS);
    stopConnector(value -> assertThat(logInterceptor.containsWarnMessage(DatabaseSchema.NO_CAPTURED_DATA_COLLECTIONS_WARNING)).isFalse());
}
/**
 * A table without a primary key must produce records with a null key and a null key
 * schema for snapshot reads, inserts, updates and deletes (DBZ-916).
 */
@Test
@FixFor("DBZ-916")
public void keylessTable() throws Exception {
    connection.execute(
            "CREATE TABLE keyless (id int, name varchar(30))",
            "INSERT INTO keyless VALUES(1, 'k')");
    TestHelper.enableTableCdc(connection, "keyless");
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.keyless")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Expected row contents before/after the id change (the unused "id=1" fixture was removed)
    final List<SchemaAndValueField> key2 = Arrays.asList(
            new SchemaAndValueField("id", Schema.OPTIONAL_INT32_SCHEMA, 2),
            new SchemaAndValueField("name", Schema.OPTIONAL_STRING_SCHEMA, "k"));
    final List<SchemaAndValueField> key3 = Arrays.asList(
            new SchemaAndValueField("id", Schema.OPTIONAL_INT32_SCHEMA, 3),
            new SchemaAndValueField("name", Schema.OPTIONAL_STRING_SCHEMA, "k"));
    // Wait for snapshot completion
    SourceRecords records = consumeRecordsByTopic(1);
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
    connection.execute(
            "INSERT INTO keyless VALUES(2, 'k')");
    records = consumeRecordsByTopic(1);
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
    // Fix: this previously re-asserted key() a second time instead of checking the key schema
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
    connection.execute(
            "UPDATE keyless SET id=3 WHERE ID=2");
    // The id change yields three records; the first carries before=id 2 / after=id 3
    records = consumeRecordsByTopic(3);
    final SourceRecord update1 = records.recordsForTopic("server1.dbo.keyless").get(0);
    assertThat(update1.key()).isNull();
    assertThat(update1.keySchema()).isNull();
    assertRecord(((Struct) update1.value()).getStruct(Envelope.FieldName.BEFORE), key2);
    assertRecord(((Struct) update1.value()).getStruct(Envelope.FieldName.AFTER), key3);
    connection.execute(
            "DELETE FROM keyless WHERE id=3");
    // Delete produces the delete record plus a tombstone with a null value
    records = consumeRecordsByTopic(2, false);
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
    assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
    assertNull(records.recordsForTopic("server1.dbo.keyless").get(1).value());
    stopConnector();
}
/**
 * message.key.columns must synthesize an {id} key for a table that has no primary
 * key of its own (DBZ-1015).
 */
@Test
@FixFor("DBZ-1015")
public void shouldRewriteIdentityKey() throws InterruptedException, SQLException {
    connection.execute(
            "CREATE TABLE keyless (id int, name varchar(30))",
            "INSERT INTO keyless VALUES(1, 'k')");
    TestHelper.enableTableCdc(connection, "keyless");
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.keyless")
            // rewrite key from table 'products': from {null} to {id}
            .with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "(.*).keyless:id")
            .build();
    start(SqlServerConnector.class, config);
    final SourceRecords snapshotRecords = consumeRecordsByTopic(1);
    final List<SourceRecord> keylessRecords = snapshotRecords.recordsForTopic("server1.dbo.keyless");
    // Despite the missing PK, the rewritten key must be present and contain "id"
    assertThat(keylessRecords.get(0).key()).isNotNull();
    final Struct rewrittenKey = (Struct) keylessRecords.get(0).key();
    Assertions.assertThat(rewrittenKey.get("id")).isNotNull();
    stopConnector();
}
/**
 * The emitted key and value schemas must reflect NOT NULL constraints, column default
 * values (including a datetimeoffset default rendered as io.debezium.time.ZonedTimestamp)
 * and the composite primary key (DBZ-1491).
 */
@Test
@FixFor("DBZ-1491")
public void shouldCaptureTableSchema() throws SQLException, InterruptedException {
    connection.execute(
            "CREATE TABLE table_schema_test (key_cola int not null,"
                    + "key_colb varchar(10) not null,"
                    + "cola int not null,"
                    + "colb datetimeoffset not null default ('2019-01-01 12:34:56.1234567+04:00'),"
                    + "colc varchar(20) default ('default_value'),"
                    + "cold float,"
                    + "primary key(key_cola, key_colb))");
    TestHelper.enableTableCdc(connection, "table_schema_test");
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    TestHelper.waitForSnapshotToBeCompleted();
    connection.execute(
            "INSERT INTO table_schema_test (key_cola, key_colb, cola, colb, colc, cold) VALUES(1, 'a', 100, '2019-01-01 10:20:39.1234567 +02:00', 'some_value', 100.20)");
    List<SourceRecord> records = consumeRecordsByTopic(1).recordsForTopic("server1.dbo.table_schema_test");
    assertThat(records).hasSize(1);
    SourceRecordAssert.assertThat(records.get(0))
            // Key schema carries both primary key columns as required fields
            .keySchemaIsEqualTo(SchemaBuilder.struct()
                    .name("server1.dbo.table_schema_test.Key")
                    .field("key_cola", Schema.INT32_SCHEMA)
                    .field("key_colb", Schema.STRING_SCHEMA)
                    .build())
            .valueAfterFieldSchemaIsEqualTo(SchemaBuilder.struct()
                    .optional()
                    .name("server1.dbo.table_schema_test.Value")
                    .field("key_cola", Schema.INT32_SCHEMA)
                    .field("key_colb", Schema.STRING_SCHEMA)
                    .field("cola", Schema.INT32_SCHEMA)
                    // datetimeoffset default surfaces as an ISO-8601 ZonedTimestamp default value
                    .field("colb",
                            SchemaBuilder.string().name("io.debezium.time.ZonedTimestamp").required().defaultValue("2019-01-01T12:34:56.1234567+04:00").version(1)
                                    .build())
                    .field("colc", SchemaBuilder.string().optional().defaultValue("default_value").build())
                    .field("cold", Schema.OPTIONAL_FLOAT64_SCHEMA)
                    .build());
    stopConnector();
}
    @Test
    @FixFor("DBZ-1923")
    public void shouldDetectPurgedHistory() throws Exception {
        // Simulates a purged database history topic: snapshot + stream, stop the connector,
        // delete the history file, then verify the restart fails with a clear error message.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final int ID_RESTART = 100;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with(SqlServerConnectorConfig.DATABASE_HISTORY, PurgableFileDatabaseHistory.class)
                .build();
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute("INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute("INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        // CDC capture is asynchronous; wait until the inserts are visible in the CDC tables.
        Awaitility.await().atMost(30, TimeUnit.SECONDS).pollInterval(100, TimeUnit.MILLISECONDS).until(() -> {
            Testing.debug("Waiting for initial changes to be propagated to CDC structures");
            return connection.getMaxLsn().isAvailable();
        });
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // +1 accounts for the row inserted into tablea by before(); drop it before checking
        // the snapshot offsets of the remaining records.
        List<SourceRecord> records = consumeRecordsByTopic(1 + RECORDS_PER_TABLE * TABLES).allRecordsInOrder();
        records = records.subList(1, records.size());
        for (Iterator<SourceRecord> it = records.iterator(); it.hasNext();) {
            SourceRecord record = it.next();
            assertThat(record.sourceOffset().get("snapshot")).as("Snapshot phase").isEqualTo(true);
            if (it.hasNext()) {
                assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot in progress").isEqualTo(false);
            }
            else {
                // Only the very last snapshot record may carry snapshot_completed=true.
                assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot completed").isEqualTo(true);
            }
        }
        stopConnector();
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_RESTART + i;
            connection.execute("INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute("INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        // Purge the history while the connector is down; the restart must then fail fast
        // because PurgableFileDatabaseHistory treats the missing/empty file as no history.
        Testing.Files.delete(TestHelper.DB_HISTORY_PATH);
        final LogInterceptor logInterceptor = new LogInterceptor();
        start(SqlServerConnector.class, config);
        assertConnectorNotRunning();
        assertThat(logInterceptor.containsStacktraceElement(
                "The db history topic or its content is fully or partially missing. Please check database history topic configuration and re-execute the snapshot."))
                .isTrue();
    }
    @Test
    @FixFor("DBZ-1988")
    public void shouldHonorSourceTimestampMode() throws InterruptedException, SQLException {
        // With source.timestamp.mode=processing the source block's ts_ms reflects the
        // connector's own processing time, so it should lie close to the envelope ts_ms.
        // NOTE(review): the DDL below has no comma before "primary key(id)" — confirm
        // SQL Server accepts this form (test appears to pass as-is in CI).
        connection.execute("CREATE TABLE source_timestamp_mode (id int, name varchar(30) primary key(id))");
        TestHelper.enableTableCdc(connection, "source_timestamp_mode");
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
                .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.source_timestamp_mode")
                .with(SqlServerConnectorConfig.SOURCE_TIMESTAMP_MODE, "processing")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        waitForSnapshotToBeCompleted("sql_server", "server1");
        connection.execute("INSERT INTO source_timestamp_mode VALUES(1, 'abc')");
        SourceRecords records = consumeRecordsByTopic(1);
        List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.source_timestamp_mode");
        SourceRecord record = recordsForTopic.get(0);
        long eventTs = (long) ((Struct) record.value()).get("ts_ms");
        long sourceTs = (long) ((Struct) ((Struct) record.value()).get("source")).get("ts_ms");
        // Not exactly equal to the envelope ts_ms, but both are processing-time based,
        // so a sub-100ms difference is expected.
        assertThat(eventTs - sourceTs).isLessThan(100);
        stopConnector();
    }
    @Test
    @FixFor("DBZ-1312")
    public void useShortTableNamesForColumnMapper() throws Exception {
        // The column-mask rule configured with the schema-qualified (short) table name
        // "dbo.tablea" must match and mask tablea.cola; tableb must remain untouched.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with("column.mask.with.4.chars", "dbo.tablea.cola")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final Struct valueA = (Struct) recordA.value();
            // The inserted 'a' is replaced by four mask characters.
            Assertions.assertThat(valueA.getStruct("after").getString("cola")).isEqualTo("****");
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        stopConnector();
    }
    @Test
    @FixFor("DBZ-1312")
    public void useLongTableNamesForColumnMapper() throws Exception {
        // Same as useShortTableNamesForColumnMapper, but the mask rule uses the fully
        // qualified (database.schema.table) name "testDB.dbo.tablea" — it must match too.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with("column.mask.with.4.chars", "testDB.dbo.tablea.cola")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final Struct valueA = (Struct) recordA.value();
            // The inserted 'a' is replaced by four mask characters.
            Assertions.assertThat(valueA.getStruct("after").getString("cola")).isEqualTo("****");
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        stopConnector();
    }
    @Test
    @FixFor("DBZ-1312")
    public void useLongTableNamesForKeyMapper() throws Exception {
        // message.key.columns given with the fully qualified table name
        // "testDB.dbo.tablea" must re-key tablea events on the "cola" column.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "testDB.dbo.tablea:cola")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            // The record key is built from "cola" instead of the table's PK column "id".
            final Struct keyA = (Struct) recordA.key();
            Assertions.assertThat(keyA.getString("cola")).isEqualTo("a");
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        stopConnector();
    }
    @Test
    @FixFor("DBZ-1312")
    public void useShortTableNamesForKeyMapper() throws Exception {
        // Same as useLongTableNamesForKeyMapper, but message.key.columns uses the
        // schema-qualified (short) table name "dbo.tablea" — it must match as well.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "dbo.tablea:cola")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            // The record key is built from "cola" instead of the table's PK column "id".
            final Struct keyA = (Struct) recordA.key();
            Assertions.assertThat(keyA.getString("cola")).isEqualTo("a");
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        stopConnector();
    }
    @Test
    @FixFor({ "DBZ-1916", "DBZ-1830" })
    public void shouldPropagateSourceTypeByDatatype() throws Exception {
        // datatype.propagate.source.type must attach the original type name/length/scale
        // as schema parameters for the datatypes matched by the configured patterns
        // (NUMERIC, VARCHAR, REAL, DECIMAL) and leave the other columns untouched.
        connection.execute("CREATE TABLE dt_table (id int, c1 int, c2 int, c3a numeric(5,2), c3b varchar(128), f1 float(10), f2 decimal(8,4) primary key(id))");
        TestHelper.enableTableCdc(connection, "dt_table");
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
                .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.dt_table")
                .with("datatype.propagate.source.type", ".+\\.NUMERIC,.+\\.VARCHAR,.+\\.REAL,.+\\.DECIMAL")
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        waitForSnapshotToBeCompleted("sql_server", "server1");
        connection.execute("INSERT INTO dt_table (id,c1,c2,c3a,c3b,f1,f2) values (1, 123, 456, 789.01, 'test', 1.228, 234.56)");
        SourceRecords records = consumeRecordsByTopic(1);
        List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.dt_table");
        final SourceRecord record = recordsForTopic.get(0);
        final Field before = record.valueSchema().field("before");
        // int columns do not match any propagation pattern, so no parameters are attached.
        assertThat(before.schema().field("id").schema().parameters()).isNull();
        assertThat(before.schema().field("c1").schema().parameters()).isNull();
        assertThat(before.schema().field("c2").schema().parameters()).isNull();
        assertThat(before.schema().field("c3a").schema().parameters()).includes(
                entry(TYPE_NAME_PARAMETER_KEY, "NUMERIC"),
                entry(TYPE_LENGTH_PARAMETER_KEY, "5"),
                entry(TYPE_SCALE_PARAMETER_KEY, "2"));
        assertThat(before.schema().field("c3b").schema().parameters()).includes(
                entry(TYPE_NAME_PARAMETER_KEY, "VARCHAR"),
                entry(TYPE_LENGTH_PARAMETER_KEY, "128"));
        assertThat(before.schema().field("f2").schema().parameters()).includes(
                entry(TYPE_NAME_PARAMETER_KEY, "DECIMAL"),
                entry(TYPE_LENGTH_PARAMETER_KEY, "8"),
                entry(TYPE_SCALE_PARAMETER_KEY, "4"));
        // SQL Server stores float(10) as REAL with length 24, as asserted here.
        assertThat(before.schema().field("f1").schema().parameters()).includes(
                entry(TYPE_NAME_PARAMETER_KEY, "REAL"),
                entry(TYPE_LENGTH_PARAMETER_KEY, "24"));
        stopConnector();
    }
    @Test
    @FixFor("DBZ-2379")
    public void shouldNotStreamWhenUsingSnapshotModeInitialOnly() throws Exception {
        // INITIAL_ONLY must take the snapshot and then stop producing records entirely,
        // logging that streaming is disabled by the configuration.
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL_ONLY)
                .build();
        final LogInterceptor logInterceptor = new LogInterceptor();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        // should be no more records
        assertNoRecordsToConsume();
        final String message = "Streaming is not enabled in current configuration";
        // Check the log only after shutdown so the streaming decision has been made.
        stopConnector(value -> assertThat(logInterceptor.containsMessage(message)).isTrue());
    }
@Test
@FixFor("DBZ-2582")
public void testMaxLsnSelectStatementWithDefault() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
stopConnector();
}
@Test
@FixFor("DBZ-2582")
public void testMaxLsnSelectStatementWithFalse() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.MAX_LSN_OPTIMIZATION, false)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
stopConnector();
}
private void assertRecord(Struct record, List<SchemaAndValueField> expected) {
expected.forEach(schemaAndValueField -> schemaAndValueField.assertFor(record));
}
    /**
     * {@link DatabaseHistory} that delegates to {@link FileDatabaseHistory} but treats an
     * existing-yet-empty history file as non-existent. Tests can thereby simulate a purged
     * database history topic by deleting the backing file while the connector is stopped.
     */
    public static class PurgableFileDatabaseHistory implements DatabaseHistory {
        final DatabaseHistory delegate = new FileDatabaseHistory();
        @Override
        public boolean exists() {
            try {
                // An empty file counts as missing history — this is what a purge looks like.
                return storageExists() && java.nio.file.Files.size(TestHelper.DB_HISTORY_PATH) > 0;
            }
            catch (IOException e) {
                // NOTE(review): the IOException cause is dropped here; consider passing it
                // along if DatabaseHistoryException offers a (String, Throwable) constructor.
                throw new DatabaseHistoryException("File should exist");
            }
        }
        // All remaining methods are plain delegation to the wrapped FileDatabaseHistory.
        @Override
        public void configure(Configuration config, HistoryRecordComparator comparator,
                              DatabaseHistoryListener listener, boolean useCatalogBeforeSchema) {
            delegate.configure(config, comparator, listener, useCatalogBeforeSchema);
        }
        @Override
        public void start() {
            delegate.start();
        }
        @Override
        public void record(Map<String, ?> source, Map<String, ?> position, String databaseName, String ddl)
                throws DatabaseHistoryException {
            delegate.record(source, position, databaseName, ddl);
        }
        @Override
        public void record(Map<String, ?> source, Map<String, ?> position, String databaseName, String schemaName,
                           String ddl, TableChanges changes)
                throws DatabaseHistoryException {
            delegate.record(source, position, databaseName, schemaName, ddl, changes);
        }
        @Override
        public void recover(Map<String, ?> source, Map<String, ?> position, Tables schema, DdlParser ddlParser) {
            delegate.recover(source, position, schema, ddlParser);
        }
        @Override
        public void stop() {
            delegate.stop();
        }
        @Override
        public boolean storageExists() {
            return delegate.storageExists();
        }
        @Override
        public void initializeStorage() {
            delegate.initializeStorage();
        }
    }
}
|
debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java
|
/*
* Copyright Debezium Authors.
*
* Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package io.debezium.connector.sqlserver;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_LENGTH_PARAMETER_KEY;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_NAME_PARAMETER_KEY;
import static io.debezium.connector.sqlserver.util.TestHelper.TYPE_SCALE_PARAMETER_KEY;
import static org.fest.assertions.Assertions.assertThat;
import static org.fest.assertions.MapAssert.entry;
import static org.junit.Assert.assertNull;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.connect.data.Field;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.source.SourceRecord;
import org.awaitility.Awaitility;
import org.fest.assertions.Assertions;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import io.debezium.config.Configuration;
import io.debezium.connector.sqlserver.SqlServerConnectorConfig.SnapshotMode;
import io.debezium.connector.sqlserver.util.TestHelper;
import io.debezium.data.Envelope;
import io.debezium.data.SchemaAndValueField;
import io.debezium.data.SourceRecordAssert;
import io.debezium.data.VerifyRecord;
import io.debezium.doc.FixFor;
import io.debezium.embedded.AbstractConnectorTest;
import io.debezium.junit.logging.LogInterceptor;
import io.debezium.relational.RelationalDatabaseConnectorConfig;
import io.debezium.relational.Tables;
import io.debezium.relational.ddl.DdlParser;
import io.debezium.relational.history.DatabaseHistory;
import io.debezium.relational.history.DatabaseHistoryException;
import io.debezium.relational.history.DatabaseHistoryListener;
import io.debezium.relational.history.FileDatabaseHistory;
import io.debezium.relational.history.HistoryRecordComparator;
import io.debezium.relational.history.TableChanges;
import io.debezium.schema.DatabaseSchema;
import io.debezium.util.Testing;
/**
* Integration test for the Debezium SQL Server connector.
*
* @author Jiri Pechanec
*/
public class SqlServerConnectorIT extends AbstractConnectorTest {
private SqlServerConnection connection;
    @Before
    public void before() throws SQLException {
        // Fresh test database with two CDC-enabled tables; tablea gets one pre-existing
        // row so the initial snapshot always produces at least one record.
        TestHelper.createTestDatabase();
        connection = TestHelper.testConnection();
        connection.execute(
                "CREATE TABLE tablea (id int primary key, cola varchar(30))",
                "CREATE TABLE tableb (id int primary key, colb varchar(30))",
                "INSERT INTO tablea VALUES(1, 'a')");
        TestHelper.enableTableCdc(connection, "tablea");
        TestHelper.enableTableCdc(connection, "tableb");
        initializeConnectorTestFramework();
        // Start every test from an empty database history file.
        Testing.Files.delete(TestHelper.DB_HISTORY_PATH);
        // Testing.Print.enable();
    }
@After
public void after() throws SQLException {
if (connection != null) {
connection.close();
}
}
@Test
public void createAndDelete() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final Struct keyA = (Struct) recordA.key();
final Struct valueA = (Struct) recordA.value();
assertRecord((Struct) valueA.get("after"), expectedRowA);
assertNull(valueA.get("before"));
final Struct keyB = (Struct) recordB.key();
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
connection.execute("DELETE FROM tableB");
final SourceRecords deleteRecords = consumeRecordsByTopic(2 * RECORDS_PER_TABLE);
final List<SourceRecord> deleteTableA = deleteRecords.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> deleteTableB = deleteRecords.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(deleteTableA).isNullOrEmpty();
Assertions.assertThat(deleteTableB).hasSize(2 * RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final SourceRecord deleteRecord = deleteTableB.get(i * 2);
final SourceRecord tombstoneRecord = deleteTableB.get(i * 2 + 1);
final List<SchemaAndValueField> expectedDeleteRow = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final Struct deleteKey = (Struct) deleteRecord.key();
final Struct deleteValue = (Struct) deleteRecord.value();
assertRecord((Struct) deleteValue.get("before"), expectedDeleteRow);
assertNull(deleteValue.get("after"));
final Struct tombstoneKey = (Struct) tombstoneRecord.key();
final Struct tombstoneValue = (Struct) tombstoneRecord.value();
assertNull(tombstoneValue);
}
stopConnector();
}
    @Test
    @FixFor("DBZ-1642")
    public void readOnlyApplicationIntent() throws Exception {
        // Connects with applicationIntent=ReadOnly (as used against read-only replicas)
        // and verifies snapshot + streaming still work; schema locking must be disabled.
        final LogInterceptor logInterceptor = new LogInterceptor();
        // Unique application name so the admin query below finds only this test's sessions.
        final String appId = "readOnlyApplicationIntent-" + UUID.randomUUID();
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with("database.applicationIntent", "ReadOnly")
                .with("database.applicationName", appId)
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        TestHelper.waitForSnapshotToBeCompleted();
        consumeRecordsByTopic(1);
        TestHelper.waitForStreamingStarted();
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES, 24);
        final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordA = tableA.get(i);
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final Struct keyA = (Struct) recordA.key();
            final Struct valueA = (Struct) recordA.value();
            assertRecord((Struct) valueA.get("after"), expectedRowA);
            assertNull(valueA.get("before"));
            final Struct keyB = (Struct) recordB.key();
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        assertThat(logInterceptor.containsMessage("Schema locking was disabled in connector configuration")).isTrue();
        // Verify that multiple subsequent transactions are used in streaming phase with read-only intent
        try (final SqlServerConnection admin = TestHelper.adminConnection()) {
            final Set<Long> txIds = new HashSet<>();
            // Poll the server's session DMVs until more than two distinct transaction ids
            // have been observed for the connector's sessions (identified by appId).
            Awaitility.await().atMost(TestHelper.waitTimeForRecords() * 5, TimeUnit.SECONDS).pollInterval(100, TimeUnit.MILLISECONDS).until(() -> {
                admin.query(
                        "SELECT (SELECT transaction_id FROM sys.dm_tran_session_transactions AS t WHERE s.session_id=t.session_id) FROM sys.dm_exec_sessions AS s WHERE program_name='"
                                + appId + "'",
                        rs -> {
                            rs.next();
                            txIds.add(rs.getLong(1));
                        });
                return txIds.size() > 2;
            });
        }
        stopConnector();
    }
    @Test
    @FixFor("DBZ-1643")
    public void timestampAndTimezone() throws Exception {
        // Runs the connector under a non-default JVM time zone and checks source.ts_ms
        // stays within +/- 5 minutes of wall-clock time, i.e. is not shifted by the zone.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final TimeZone currentTimeZone = TimeZone.getDefault();
        try {
            TimeZone.setDefault(TimeZone.getTimeZone("Australia/Canberra"));
            final Configuration config = TestHelper.defaultConfig()
                    .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                    .build();
            start(SqlServerConnector.class, config);
            assertConnectorIsRunning();
            // Wait for snapshot completion
            consumeRecordsByTopic(1);
            final Instant now = Instant.now();
            final Instant lowerBound = now.minusSeconds(5 * 60);
            final Instant upperBound = now.plusSeconds(5 * 60);
            for (int i = 0; i < RECORDS_PER_TABLE; i++) {
                final int id = ID_START + i;
                connection.execute(
                        "INSERT INTO tablea VALUES(" + id + ", 'a')");
                connection.execute(
                        "INSERT INTO tableb VALUES(" + id + ", 'b')");
            }
            final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
            final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
            final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
            Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
            Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
            for (int i = 0; i < RECORDS_PER_TABLE; i++) {
                final SourceRecord recordA = tableA.get(i);
                final long timestamp = ((Struct) recordA.value()).getStruct("source").getInt64("ts_ms");
                final Instant instant = Instant.ofEpochMilli(timestamp);
                Assertions.assertThat(instant.isAfter(lowerBound) && instant.isBefore(upperBound)).isTrue();
            }
            stopConnector();
        }
        finally {
            // Always restore the JVM default zone so other tests are unaffected.
            TimeZone.setDefault(currentTimeZone);
        }
    }
    @Test
    public void deleteWithoutTombstone() throws Exception {
        // With tombstones.on.delete=false a DELETE yields only the delete event,
        // without the follow-up null-value tombstone record.
        final int RECORDS_PER_TABLE = 5;
        final int TABLES = 2;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .with(SqlServerConnectorConfig.TOMBSTONES_ON_DELETE, false)
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            connection.execute(
                    "INSERT INTO tablea VALUES(" + id + ", 'a')");
            connection.execute(
                    "INSERT INTO tableb VALUES(" + id + ", 'b')");
        }
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
        connection.execute("DELETE FROM tableB");
        // Exactly one event per deleted row is expected — no tombstones.
        final SourceRecords deleteRecords = consumeRecordsByTopic(RECORDS_PER_TABLE);
        final List<SourceRecord> deleteTableA = deleteRecords.recordsForTopic("server1.dbo.tablea");
        final List<SourceRecord> deleteTableB = deleteRecords.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(deleteTableA).isNullOrEmpty();
        Assertions.assertThat(deleteTableB).hasSize(RECORDS_PER_TABLE);
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord deleteRecord = deleteTableB.get(i);
            final List<SchemaAndValueField> expectedDeleteRow = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final Struct deleteKey = (Struct) deleteRecord.key();
            final Struct deleteValue = (Struct) deleteRecord.value();
            assertRecord((Struct) deleteValue.get("before"), expectedDeleteRow);
            assertNull(deleteValue.get("after"));
        }
        stopConnector();
    }
    @Test
    public void update() throws Exception {
        // Inserts rows in a single transaction and then updates them all; verifies the
        // insert events are followed by update events carrying both before and after state.
        final int RECORDS_PER_TABLE = 5;
        final int ID_START = 10;
        final Configuration config = TestHelper.defaultConfig()
                .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
                .build();
        start(SqlServerConnector.class, config);
        assertConnectorIsRunning();
        // Testing.Print.enable();
        // Wait for snapshot completion
        consumeRecordsByTopic(1);
        connection.setAutoCommit(false);
        final String[] tableBInserts = new String[RECORDS_PER_TABLE];
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final int id = ID_START + i;
            tableBInserts[i] = "INSERT INTO tableb VALUES(" + id + ", 'b')";
        }
        // All inserts are committed as one transaction.
        connection.execute(tableBInserts);
        connection.setAutoCommit(true);
        connection.execute("UPDATE tableb SET colb='z'");
        final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * 2);
        final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
        Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE * 2);
        // First half: insert events (after only, no before).
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordB = tableB.get(i);
            final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final Struct keyB = (Struct) recordB.key();
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("after"), expectedRowB);
            assertNull(valueB.get("before"));
        }
        // Second half: update events with both before ('b') and after ('z') images.
        for (int i = 0; i < RECORDS_PER_TABLE; i++) {
            final SourceRecord recordB = tableB.get(i + RECORDS_PER_TABLE);
            final List<SchemaAndValueField> expectedBefore = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
            final List<SchemaAndValueField> expectedAfter = Arrays.asList(
                    new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
                    new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "z"));
            final Struct keyB = (Struct) recordB.key();
            final Struct valueB = (Struct) recordB.value();
            assertRecord((Struct) valueB.get("before"), expectedBefore);
            assertRecord((Struct) valueB.get("after"), expectedAfter);
        }
        stopConnector();
    }
@Test
public void updatePrimaryKey() throws Exception {
    // A primary-key update must be decomposed into a delete (old key) + tombstone
    // + insert (new key) per table, with event_serial_no distinguishing the two
    // CDC events SQL Server produces for the same source operation.
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Testing.Print.enable();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    connection.execute("INSERT INTO tableb VALUES(1, 'b')");
    consumeRecordsByTopic(1);
    // Update the PK of one row in each table inside a single transaction.
    connection.setAutoCommit(false);
    connection.execute(
            "UPDATE tablea SET id=100 WHERE id=1",
            "UPDATE tableb SET id=100 WHERE id=1");
    // 3 records per table: delete + tombstone + insert.
    final SourceRecords records = consumeRecordsByTopic(6);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(3);
    Assertions.assertThat(tableB).hasSize(3);
    final List<SchemaAndValueField> expectedDeleteRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedDeleteKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
    final List<SchemaAndValueField> expectedInsertKeyA = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));
    final SourceRecord deleteRecordA = tableA.get(0);
    final SourceRecord tombstoneRecordA = tableA.get(1);
    final SourceRecord insertRecordA = tableA.get(2);
    // Delete event carries the old row in "before" and is keyed by the old PK.
    final Struct deleteKeyA = (Struct) deleteRecordA.key();
    final Struct deleteValueA = (Struct) deleteRecordA.value();
    assertRecord(deleteValueA.getStruct("before"), expectedDeleteRowA);
    assertRecord(deleteKeyA, expectedDeleteKeyA);
    assertNull(deleteValueA.get("after"));
    // Tombstone: old key with a null value so compacted topics drop the old key.
    final Struct tombstoneKeyA = (Struct) tombstoneRecordA.key();
    final Struct tombstoneValueA = (Struct) tombstoneRecordA.value();
    assertRecord(tombstoneKeyA, expectedDeleteKeyA);
    assertNull(tombstoneValueA);
    // Insert event carries the new row in "after" and is keyed by the new PK.
    final Struct insertKeyA = (Struct) insertRecordA.key();
    final Struct insertValueA = (Struct) insertRecordA.value();
    assertRecord(insertValueA.getStruct("after"), expectedInsertRowA);
    assertRecord(insertKeyA, expectedInsertKeyA);
    assertNull(insertValueA.get("before"));
    final List<SchemaAndValueField> expectedDeleteRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedDeleteKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
    final List<SchemaAndValueField> expectedInsertRowB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
            new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
    final List<SchemaAndValueField> expectedInsertKeyB = Arrays.asList(
            new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));
    final SourceRecord deleteRecordB = tableB.get(0);
    final SourceRecord tombstoneRecordB = tableB.get(1);
    final SourceRecord insertRecordB = tableB.get(2);
    // Locals renamed from "deletekeyB"/"tombstonekeyB"/"insertkeyB" for consistent
    // lowerCamelCase, matching the A-side variables above.
    final Struct deleteKeyB = (Struct) deleteRecordB.key();
    final Struct deleteValueB = (Struct) deleteRecordB.value();
    assertRecord(deleteValueB.getStruct("before"), expectedDeleteRowB);
    assertRecord(deleteKeyB, expectedDeleteKeyB);
    assertNull(deleteValueB.get("after"));
    // First CDC event generated for the PK update.
    assertThat(deleteValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(1L);
    final Struct tombstoneKeyB = (Struct) tombstoneRecordB.key();
    final Struct tombstoneValueB = (Struct) tombstoneRecordB.value();
    assertRecord(tombstoneKeyB, expectedDeleteKeyB);
    assertNull(tombstoneValueB);
    final Struct insertKeyB = (Struct) insertRecordB.key();
    final Struct insertValueB = (Struct) insertRecordB.value();
    assertRecord(insertValueB.getStruct("after"), expectedInsertRowB);
    assertRecord(insertKeyB, expectedInsertKeyB);
    assertNull(insertValueB.get("before"));
    // Second CDC event of the same source operation.
    assertThat(insertValueB.getStruct("source").getInt64("event_serial_no")).isEqualTo(2L);
    stopConnector();
}
@Test
@FixFor("DBZ-1152")
// Verifies that a primary-key update (delete + tombstone + insert per table) is fully
// and exactly delivered even when the connector is stopped in the middle of emitting
// the event sequence and then restarted.
public void updatePrimaryKeyWithRestartInMiddle() throws Exception {
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.build();
// Stop the connector as soon as the insert ("c") event with the new PK (id == 100)
// has been produced, i.e. mid-way through the delete/tombstone/insert sequence.
start(SqlServerConnector.class, config, record -> {
final Struct envelope = (Struct) record.value();
return envelope != null && "c".equals(envelope.get("op")) && (envelope.getStruct("after").getInt32("id") == 100);
});
assertConnectorIsRunning();
// Testing.Print.enable();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO tableb VALUES(1, 'b')");
consumeRecordsByTopic(1);
// Change the PK of one row in each table within the same transaction.
connection.setAutoCommit(false);
connection.execute(
"UPDATE tablea SET id=100 WHERE id=1",
"UPDATE tableb SET id=100 WHERE id=1");
// Only part of the expected 6 records arrives before the triggered stop.
final SourceRecords records1 = consumeRecordsByTopic(2);
stopConnector();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// The remaining records are delivered after the restart.
final SourceRecords records2 = consumeRecordsByTopic(4);
// Stitch both batches together; tablea's records span the restart boundary.
final List<SourceRecord> tableA = records1.recordsForTopic("server1.dbo.tablea");
tableA.addAll(records2.recordsForTopic("server1.dbo.tablea"));
final List<SourceRecord> tableB = records2.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(3);
Assertions.assertThat(tableB).hasSize(3);
final List<SchemaAndValueField> expectedDeleteRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedDeleteKeyA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
final List<SchemaAndValueField> expectedInsertRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedInsertKeyA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));
// Delete event: old row in "before", keyed by the old PK.
final SourceRecord deleteRecordA = tableA.get(0);
final SourceRecord tombstoneRecordA = tableA.get(1);
final SourceRecord insertRecordA = tableA.get(2);
final Struct deleteKeyA = (Struct) deleteRecordA.key();
final Struct deleteValueA = (Struct) deleteRecordA.value();
assertRecord(deleteValueA.getStruct("before"), expectedDeleteRowA);
assertRecord(deleteKeyA, expectedDeleteKeyA);
assertNull(deleteValueA.get("after"));
// Tombstone: old key, null value.
final Struct tombstoneKeyA = (Struct) tombstoneRecordA.key();
final Struct tombstoneValueA = (Struct) tombstoneRecordA.value();
assertRecord(tombstoneKeyA, expectedDeleteKeyA);
assertNull(tombstoneValueA);
// Insert event: new row in "after", keyed by the new PK.
final Struct insertKeyA = (Struct) insertRecordA.key();
final Struct insertValueA = (Struct) insertRecordA.value();
assertRecord(insertValueA.getStruct("after"), expectedInsertRowA);
assertRecord(insertKeyA, expectedInsertKeyA);
assertNull(insertValueA.get("before"));
final List<SchemaAndValueField> expectedDeleteRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final List<SchemaAndValueField> expectedDeleteKeyB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 1));
final List<SchemaAndValueField> expectedInsertRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final List<SchemaAndValueField> expectedInsertKeyB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, 100));
// Same delete/tombstone/insert structure expected for tableb.
final SourceRecord deleteRecordB = tableB.get(0);
final SourceRecord tombstoneRecordB = tableB.get(1);
final SourceRecord insertRecordB = tableB.get(2);
final Struct deletekeyB = (Struct) deleteRecordB.key();
final Struct deleteValueB = (Struct) deleteRecordB.value();
assertRecord(deleteValueB.getStruct("before"), expectedDeleteRowB);
assertRecord(deletekeyB, expectedDeleteKeyB);
assertNull(deleteValueB.get("after"));
final Struct tombstonekeyB = (Struct) tombstoneRecordB.key();
final Struct tombstoneValueB = (Struct) tombstoneRecordB.value();
assertRecord(tombstonekeyB, expectedDeleteKeyB);
assertNull(tombstoneValueB);
final Struct insertkeyB = (Struct) insertRecordB.key();
final Struct insertValueB = (Struct) insertRecordB.value();
assertRecord(insertValueB.getStruct("after"), expectedInsertRowB);
assertRecord(insertkeyB, expectedInsertKeyB);
assertNull(insertValueB.get("before"));
stopConnector();
}
@Test
@FixFor("DBZ-2329")
// Verifies that no events are lost or duplicated when the connector is restarted in the
// middle of a transaction containing repeated primary-key updates. Tiny queue/batch
// sizes throttle delivery so the stop lands mid-transaction.
public void updatePrimaryKeyTwiceWithRestartInMiddleOfTx() throws Exception {
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.MAX_QUEUE_SIZE, 2)
.with(SqlServerConnectorConfig.MAX_BATCH_SIZE, 1)
.with(SqlServerConnectorConfig.TOMBSTONES_ON_DELETE, false)
.build();
// Testing.Print.enable();
// Wait for snapshot completion
// Stop the connector when the delete ("d") event whose old id is 305 is seen,
// i.e. while the PK-update event stream is still being emitted.
start(SqlServerConnector.class, config, record -> {
final Struct envelope = (Struct) record.value();
boolean stop = envelope != null && "d".equals(envelope.get("op")) && (envelope.getStruct("before").getInt32("id") == 305);
return stop;
});
assertConnectorIsRunning();
consumeRecordsByTopic(1);
connection.setAutoCommit(false);
connection.execute("INSERT INTO tableb (id, colb) values (1,'1')");
connection.execute("INSERT INTO tableb (id, colb) values (2,'2')");
connection.execute("INSERT INTO tableb (id, colb) values (3,'3')");
connection.execute("INSERT INTO tableb (id, colb) values (4,'4')");
connection.execute("INSERT INTO tableb (id, colb) values (5,'5')");
consumeRecordsByTopic(5);
// Two PK-updating statements over the 5 rows; with tombstones disabled each PK
// update is emitted as delete + insert, so 2 x 5 x 2 = 20 records are expected.
connection.execute("UPDATE tableb set id = colb + 300");
connection.execute("UPDATE tableb set id = colb + 300");
// Part of the 20 records arrives before the triggered stop ...
final SourceRecords records1 = consumeRecordsByTopic(14);
stopConnector();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// ... and the remainder after the restart; together they must be exactly 20.
final SourceRecords records2 = consumeRecordsByTopic(6);
final List<SourceRecord> tableB = records1.recordsForTopic("server1.dbo.tableb");
tableB.addAll(records2.recordsForTopic("server1.dbo.tableb"));
Assertions.assertThat(tableB).hasSize(20);
stopConnector();
}
@Test
public void streamChangesWhileStopped() throws Exception {
    // Changes made while the connector is down must be streamed after a restart,
    // resuming from the persisted offset.
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    final int ID_RESTART = 100;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_START + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    stopConnector();
    // These inserts happen while the connector is stopped and must be picked up
    // after the restart.
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = ID_RESTART + i;
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
    for (int i = 0; i < RECORDS_PER_TABLE; i++) {
        final int id = i + ID_RESTART;
        final SourceRecord recordA = tableA.get(i);
        final SourceRecord recordB = tableB.get(i);
        final List<SchemaAndValueField> expectedRowA = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
        final Struct valueA = (Struct) recordA.value();
        assertRecord((Struct) valueA.get("after"), expectedRowA);
        assertNull(valueA.get("before"));
        final Struct valueB = (Struct) recordB.value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
    // Stop explicitly, for consistency with the other tests in this class
    // (previously the connector was left running at the end of this test).
    stopConnector();
}
@Test
@FixFor("DBZ-1069")
// Verifies source offsets: snapshot records carry snapshot/snapshot_completed flags
// (only the very last snapshot record has snapshot_completed=true), while streamed
// records carry no snapshot flags but do carry a change_lsn.
public void verifyOffsets() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final int ID_RESTART = 100;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.build();
// Insert pre-snapshot data and remember the ids for the CDC readiness check below.
final List<Integer> expectedIds = new ArrayList<>();
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
expectedIds.add(id);
}
final String tableaCT = connection.getNameOfChangeTable("tablea");
final String tablebCT = connection.getNameOfChangeTable("tableb");
// Block until the last insert is visible in tableb's change table.
TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == expectedIds.get(expectedIds.size() - 1));
// Poll until the CDC capture job has recorded ALL pre-snapshot inserts for both
// tables, so the snapshot/streaming boundary is deterministic before startup.
Awaitility.await().atMost(30, TimeUnit.SECONDS).until(() -> {
// Wait for max lsn to be available
if (!connection.getMaxLsn().isAvailable()) {
return false;
}
// verify pre-snapshot inserts have succeeded
Map<String, Boolean> resultMap = new HashMap<>();
connection.listOfChangeTables().forEach(ct -> {
final String tableName = ct.getChangeTableId().table();
if (tableName.endsWith("dbo_" + tableaCT) || tableName.endsWith("dbo_" + tablebCT)) {
try {
final Lsn minLsn = connection.getMinLsn(tableName);
final Lsn maxLsn = connection.getMaxLsn();
SqlServerChangeTable[] tables = Collections.singletonList(ct).toArray(new SqlServerChangeTable[]{});
// Collect every id currently present in this change table ...
final List<Integer> ids = new ArrayList<>();
connection.getChangesForTables(tables, minLsn, maxLsn, resultsets -> {
final ResultSet rs = resultsets[0];
while (rs.next()) {
ids.add(rs.getInt("id"));
}
});
// ... and mark the table ready only if exactly the inserted ids are seen.
if (ids.equals(expectedIds)) {
resultMap.put(tableName, true);
}
else {
resultMap.put(tableName, false);
}
}
catch (Exception e) {
org.junit.Assert.fail("Failed to fetch changes for table " + tableName + ": " + e.getMessage());
}
}
});
return resultMap.values().stream().filter(v -> !v).count() == 0;
});
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Consume one extra leading record plus all snapshot records, then skip the first
// (NOTE(review): presumably the snapshot-completion marker record consumed by the
// other tests as "wait for snapshot completion" -- confirm against the harness).
List<SourceRecord> records = consumeRecordsByTopic(1 + RECORDS_PER_TABLE * TABLES).allRecordsInOrder();
records = records.subList(1, records.size());
for (Iterator<SourceRecord> it = records.iterator(); it.hasNext();) {
SourceRecord record = it.next();
assertThat(record.sourceOffset().get("snapshot")).as("Snapshot phase").isEqualTo(true);
if (it.hasNext()) {
assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot in progress").isEqualTo(false);
}
else {
// Only the last snapshot record flags the snapshot as completed.
assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot completed").isEqualTo(true);
}
}
stopConnector();
// Inserts made while the connector is stopped are streamed after restart.
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_RESTART + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
final SourceRecords sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = i + ID_RESTART;
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final Struct valueA = (Struct) recordA.value();
assertRecord((Struct) valueA.get("after"), expectedRowA);
assertNull(valueA.get("before"));
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
// Streaming-phase offsets: no snapshot flags, but an LSN must be present.
assertThat(recordA.sourceOffset().get("snapshot")).as("Streaming phase").isNull();
assertThat(recordA.sourceOffset().get("snapshot_completed")).as("Streaming phase").isNull();
assertThat(recordA.sourceOffset().get("change_lsn")).as("LSN present").isNotNull();
assertThat(recordB.sourceOffset().get("snapshot")).as("Streaming phase").isNull();
assertThat(recordB.sourceOffset().get("snapshot_completed")).as("Streaming phase").isNull();
assertThat(recordB.sourceOffset().get("change_lsn")).as("LSN present").isNotNull();
}
// NOTE(review): no stopConnector() here -- presumably the test harness stops the
// connector in its teardown; confirm, or add an explicit stop for consistency.
}
@Test
public void testWhitelistTable() throws Exception {
    // Only dbo.tableb is whitelisted (deprecated "table.whitelist" property);
    // inserts into tablea must not be captured.
    final int recordsPerTable = 5;
    final int capturedTables = 1;
    final int startingId = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.TABLE_WHITELIST, "dbo.tableb")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    // Write the same number of rows into both tables; only tableb should be seen.
    for (int id = startingId; id < startingId + recordsPerTable; id++) {
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    final SourceRecords streamed = consumeRecordsByTopic(recordsPerTable * capturedTables);
    final List<SourceRecord> tableARecords = streamed.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableBRecords = streamed.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableARecords == null || tableARecords.isEmpty()).isTrue();
    Assertions.assertThat(tableBRecords).hasSize(recordsPerTable);
    stopConnector();
}
@Test
public void testTableIncludeList() throws Exception {
    // Capture only dbo.tableb via "table.include.list"; tablea changes are ignored.
    final int recordsPerTable = 5;
    final int capturedTables = 1;
    final int startingId = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.tableb")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    // Write the same number of rows into both tables; only tableb should be seen.
    for (int id = startingId; id < startingId + recordsPerTable; id++) {
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    final SourceRecords streamed = consumeRecordsByTopic(recordsPerTable * capturedTables);
    final List<SourceRecord> tableARecords = streamed.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableBRecords = streamed.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableARecords == null || tableARecords.isEmpty()).isTrue();
    Assertions.assertThat(tableBRecords).hasSize(recordsPerTable);
    stopConnector();
}
@Test
public void testBlacklistTable() throws Exception {
    // dbo.tablea is blacklisted (deprecated "table.blacklist" property);
    // only tableb changes must be captured.
    final int recordsPerTable = 5;
    final int capturedTables = 1;
    final int startingId = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_BLACKLIST, "dbo.tablea")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    // Write the same number of rows into both tables; only tableb should be seen.
    for (int id = startingId; id < startingId + recordsPerTable; id++) {
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    final SourceRecords streamed = consumeRecordsByTopic(recordsPerTable * capturedTables);
    final List<SourceRecord> tableARecords = streamed.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableBRecords = streamed.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableARecords == null || tableARecords.isEmpty()).isTrue();
    Assertions.assertThat(tableBRecords).hasSize(recordsPerTable);
    stopConnector();
}
@Test
public void testTableExcludeList() throws Exception {
    // dbo.tablea is excluded via "table.exclude.list"; only tableb changes are captured.
    final int recordsPerTable = 5;
    final int capturedTables = 1;
    final int startingId = 10;
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.TABLE_EXCLUDE_LIST, "dbo.tablea")
            .build();
    connection.execute(
            "INSERT INTO tableb VALUES(1, 'b')");
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    // Write the same number of rows into both tables; only tableb should be seen.
    for (int id = startingId; id < startingId + recordsPerTable; id++) {
        connection.execute(
                "INSERT INTO tablea VALUES(" + id + ", 'a')");
        connection.execute(
                "INSERT INTO tableb VALUES(" + id + ", 'b')");
    }
    final SourceRecords streamed = consumeRecordsByTopic(recordsPerTable * capturedTables);
    final List<SourceRecord> tableARecords = streamed.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> tableBRecords = streamed.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(tableARecords == null || tableARecords.isEmpty()).isTrue();
    Assertions.assertThat(tableBRecords).hasSize(recordsPerTable);
    stopConnector();
}
@Test
@FixFor("DBZ-1617")
// Verifies column exclusion when the CDC capture instance was created BEFORE a column
// was added to the base table, so the capture instance's column set no longer matches
// the current table schema. The excluded column must still be filtered from events.
public void blacklistColumnWhenCdcColumnsDoNotMatchWithOriginalSnapshot() throws Exception {
connection.execute("CREATE TABLE table_a (id int, name varchar(30), amount integer primary key(id))");
TestHelper.enableTableCdc(connection, "table_a");
// Added after CDC was enabled, so it is not part of the capture instance.
connection.execute("ALTER TABLE table_a ADD blacklisted_column varchar(30)");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.table_a.blacklisted_column")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO table_a VALUES(10, 'some_name', 120, 'some_string')");
final SourceRecords records = consumeRecordsByTopic(1);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.table_a");
// The emitted value schema must not contain the excluded column.
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.field("amount", Schema.OPTIONAL_INT32_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name")
.put("amount", 120);
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldIsEqualTo(expectedValueA)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA);
stopConnector();
}
@Test
@FixFor("DBZ-1067")
// Verifies that a column listed in the (deprecated) "column.blacklist" property is
// omitted from both the emitted value schema and the value, while tables not matched
// by the filter keep all of their columns.
public void testBlacklistColumn() throws Exception {
connection.execute(
"CREATE TABLE blacklist_column_table_a (id int, name varchar(30), amount integer primary key(id))",
"CREATE TABLE blacklist_column_table_b (id int, name varchar(30), amount integer primary key(id))");
TestHelper.enableTableCdc(connection, "blacklist_column_table_a");
TestHelper.enableTableCdc(connection, "blacklist_column_table_b");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_BLACKLIST, "dbo.blacklist_column_table_a.amount")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO blacklist_column_table_a VALUES(10, 'some_name', 120)");
connection.execute("INSERT INTO blacklist_column_table_b VALUES(11, 'some_name', 447)");
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.blacklist_column_table_a");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.blacklist_column_table_b");
// table_a: the blacklisted "amount" column must be absent.
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.blacklist_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
// table_b: not matched by the filter, "amount" is still present.
Schema expectedSchemaB = SchemaBuilder.struct()
.optional()
.name("server1.dbo.blacklist_column_table_b.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.field("amount", Schema.OPTIONAL_INT32_SCHEMA)
.build();
Struct expectedValueB = new Struct(expectedSchemaB)
.put("id", 11)
.put("name", "some_name")
.put("amount", 447);
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldIsEqualTo(expectedValueA)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA);
Assertions.assertThat(tableB).hasSize(1);
SourceRecordAssert.assertThat(tableB.get(0))
.valueAfterFieldIsEqualTo(expectedValueB)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaB);
stopConnector();
}
@Test
@FixFor("DBZ-1067")
// Same scenario as testBlacklistColumn but using the replacement
// "column.exclude.list" property: the excluded column must be dropped from the
// schema and value of the matched table only.
public void testColumnExcludeList() throws Exception {
connection.execute(
"CREATE TABLE blacklist_column_table_a (id int, name varchar(30), amount integer primary key(id))",
"CREATE TABLE blacklist_column_table_b (id int, name varchar(30), amount integer primary key(id))");
TestHelper.enableTableCdc(connection, "blacklist_column_table_a");
TestHelper.enableTableCdc(connection, "blacklist_column_table_b");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.blacklist_column_table_a.amount")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO blacklist_column_table_a VALUES(10, 'some_name', 120)");
connection.execute("INSERT INTO blacklist_column_table_b VALUES(11, 'some_name', 447)");
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.blacklist_column_table_a");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.blacklist_column_table_b");
// table_a: the excluded "amount" column must be absent.
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.blacklist_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
// table_b: not matched by the filter, "amount" is still present.
Schema expectedSchemaB = SchemaBuilder.struct()
.optional()
.name("server1.dbo.blacklist_column_table_b.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.field("amount", Schema.OPTIONAL_INT32_SCHEMA)
.build();
Struct expectedValueB = new Struct(expectedSchemaB)
.put("id", 11)
.put("name", "some_name")
.put("amount", 447);
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldIsEqualTo(expectedValueA)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA);
Assertions.assertThat(tableB).hasSize(1);
SourceRecordAssert.assertThat(tableB.get(0))
.valueAfterFieldIsEqualTo(expectedValueB)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaB);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
// Verifies "column.include.list": only columns matched by the include patterns are
// emitted. Both tables keep id and name; only table_b additionally keeps amount.
public void testColumnIncludeList() throws Exception {
connection.execute(
"CREATE TABLE include_list_column_table_a (id int, name varchar(30), amount integer primary key(id))",
"CREATE TABLE include_list_column_table_b (id int, name varchar(30), amount integer primary key(id))");
TestHelper.enableTableCdc(connection, "include_list_column_table_a");
TestHelper.enableTableCdc(connection, "include_list_column_table_b");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, ".*id,.*name,dbo.include_list_column_table_b.amount")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 'some_name', 120)");
connection.execute("INSERT INTO include_list_column_table_b VALUES(11, 'some_name', 447)");
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.include_list_column_table_a");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.include_list_column_table_b");
// table_a: "amount" is not in the include list, so it must be absent.
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.include_list_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
// table_b: "amount" is explicitly included, so it must be present.
Schema expectedSchemaB = SchemaBuilder.struct()
.optional()
.name("server1.dbo.include_list_column_table_b.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.field("amount", Schema.OPTIONAL_INT32_SCHEMA)
.build();
Struct expectedValueB = new Struct(expectedSchemaB)
.put("id", 11)
.put("name", "some_name")
.put("amount", 447);
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldIsEqualTo(expectedValueA)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA);
Assertions.assertThat(tableB).hasSize(1);
SourceRecordAssert.assertThat(tableB.get(0))
.valueAfterFieldIsEqualTo(expectedValueB)
.valueAfterFieldSchemaIsEqualTo(expectedSchemaB);
stopConnector();
}
@Test
@FixFor("DBZ-1692")
// Verifies "column.mask.hash.<algorithm>.with.salt.<salt>": configured columns are
// replaced by a salted SHA-256 hash. table_b's column is varchar(20), so its emitted
// hash is truncated to the column length (first 20 chars of the same hash).
public void shouldConsumeEventsWithMaskedHashedColumns() throws Exception {
connection.execute(
"CREATE TABLE masked_hashed_column_table_a (id int, name varchar(255) primary key(id))",
"CREATE TABLE masked_hashed_column_table_b (id int, name varchar(20), primary key(id))");
TestHelper.enableTableCdc(connection, "masked_hashed_column_table_a");
TestHelper.enableTableCdc(connection, "masked_hashed_column_table_b");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with("column.mask.hash.SHA-256.with.salt.CzQMA0cB5K", "testDB.dbo.masked_hashed_column_table_a.name, testDB.dbo.masked_hashed_column_table_b.name")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO masked_hashed_column_table_a VALUES(10, 'some_name')");
connection.execute("INSERT INTO masked_hashed_column_table_b VALUES(11, 'some_name')");
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.masked_hashed_column_table_a");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.masked_hashed_column_table_b");
assertThat(tableA).hasSize(1);
SourceRecord record = tableA.get(0);
VerifyRecord.isValidInsert(record, "id", 10);
// Full 64-hex-char salted SHA-256 digest fits into varchar(255).
Struct value = (Struct) record.value();
if (value.getStruct("after") != null) {
assertThat(value.getStruct("after").getString("name")).isEqualTo("3b225d0696535d66f2c0fb2e36b012c520d396af3dd8f18330b9c9cd23ca714e");
}
assertThat(tableB).hasSize(1);
record = tableB.get(0);
VerifyRecord.isValidInsert(record, "id", 11);
value = (Struct) record.value();
// Same digest truncated to the varchar(20) column length.
if (value.getStruct("after") != null) {
assertThat(value.getStruct("after").getString("name")).isEqualTo("3b225d0696535d66f2c0");
}
stopConnector();
}
@Test
@FixFor("DBZ-1972")
// Verifies "column.mask.with.<n>.chars" (value replaced by n asterisks) and
// "column.truncate.to.<n>.chars" (value cut to its first n characters).
public void shouldConsumeEventsWithMaskedAndTruncatedColumns() throws Exception {
connection.execute(
"CREATE TABLE masked_hashed_column_table (id int, name varchar(255) primary key(id))",
"CREATE TABLE truncated_column_table (id int, name varchar(20), primary key(id))");
TestHelper.enableTableCdc(connection, "masked_hashed_column_table");
TestHelper.enableTableCdc(connection, "truncated_column_table");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with("column.mask.with.12.chars", "testDB.dbo.masked_hashed_column_table.name")
.with("column.truncate.to.4.chars", "testDB.dbo.truncated_column_table.name")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
connection.execute("INSERT INTO masked_hashed_column_table VALUES(10, 'some_name')");
connection.execute("INSERT INTO truncated_column_table VALUES(11, 'some_name')");
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.masked_hashed_column_table");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.truncated_column_table");
assertThat(tableA).hasSize(1);
SourceRecord record = tableA.get(0);
VerifyRecord.isValidInsert(record, "id", 10);
// Masked: 'some_name' becomes 12 asterisks.
Struct value = (Struct) record.value();
if (value.getStruct("after") != null) {
assertThat(value.getStruct("after").getString("name")).isEqualTo("************");
}
assertThat(tableB).hasSize(1);
record = tableB.get(0);
VerifyRecord.isValidInsert(record, "id", 11);
value = (Struct) record.value();
// Truncated: 'some_name' becomes its first 4 characters.
if (value.getStruct("after") != null) {
assertThat(value.getStruct("after").getString("name")).isEqualTo("some");
}
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void whenCaptureInstanceExcludesColumnsExpectSnapshotAndStreamingToExcludeColumns() throws Exception {
// The CDC capture instance lists only (id, name), so the "amount" column must be
// absent from both the snapshot record and the subsequently streamed record.
connection.execute(
"CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
Arrays.asList("id", "name"));
final Configuration config = TestHelper.defaultConfig()
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
// Streamed change after the snapshot has finished
connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120)");
final SourceRecords records = consumeRecordsByTopic(3);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
// Expected value schema contains only the columns captured by the CDC instance
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.excluded_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueSnapshot = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "a name");
Struct expectedValueStreaming = new Struct(expectedSchemaA)
.put("id", 11)
.put("name", "some_name");
// record 0 = snapshot row, record 1 = streamed insert
Assertions.assertThat(tableA).hasSize(2);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueSnapshot);
SourceRecordAssert.assertThat(tableA.get(1))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueStreaming);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void whenMultipleCaptureInstancesExcludesColumnsExpectLatestCDCTableUtilized() throws Exception {
// Two capture instances exist for the same table (before/after an ALTER adding
// "note"); the connector must use the newest instance, so the streamed record
// includes "note" while the snapshot row (pre-ALTER data) has it as null.
connection.execute(
"CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
Arrays.asList("id", "name"));
connection.execute("ALTER TABLE excluded_column_table_a ADD note varchar(30)")
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a_2",
Arrays.asList("id", "name", "note"));
final Configuration config = TestHelper.defaultConfig()
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120, 'a note')");
final SourceRecords records = consumeRecordsByTopic(3);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
// Schema from the second (latest) capture instance: id, name, note — no amount
Schema expectedSchema = SchemaBuilder.struct()
.optional()
.name("server1.dbo.excluded_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.field("note", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueSnapshot = new Struct(expectedSchema)
.put("id", 10)
.put("name", "a name")
.put("note", null);
Struct expectedValueStreaming = new Struct(expectedSchema)
.put("id", 11)
.put("name", "some_name")
.put("note", "a note");
Assertions.assertThat(tableA).hasSize(2);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchema)
.valueAfterFieldIsEqualTo(expectedValueSnapshot);
SourceRecordAssert.assertThat(tableA.get(1))
.valueAfterFieldSchemaIsEqualTo(expectedSchema)
.valueAfterFieldIsEqualTo(expectedValueStreaming);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void whenCaptureInstanceExcludesColumnsAndColumnsRenamedExpectNoErrors() throws Exception {
// Renames a captured column (name -> first_name) and re-creates the capture
// instance; the connector must emit the snapshot row with the old schema and
// the streamed row with the renamed column, without failing.
connection.execute(
"CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'a name', 100)");
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
Arrays.asList("id", "name"));
final Configuration config = TestHelper.defaultConfig()
.with(RelationalDatabaseConnectorConfig.TABLE_INCLUDE_LIST, ".*excluded_column_table_a")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForStreamingRunning("sql_server", "server1");
// Disable, rename the column, then re-enable CDC under the same instance name
TestHelper.disableTableCdc(connection, "excluded_column_table_a");
connection.execute("EXEC sp_RENAME 'excluded_column_table_a.name', 'first_name', 'COLUMN'");
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
Arrays.asList("id", "first_name"));
connection.execute("INSERT INTO excluded_column_table_a VALUES(11, 'some_name', 120)");
TestHelper.waitForCdcRecord(connection, "excluded_column_table_a", "dbo_excluded_column_table_a", rs -> rs.getInt("id") == 11);
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a");
// Schema before the rename (snapshot record)
Schema expectedSchema1 = SchemaBuilder.struct()
.optional()
.name("server1.dbo.excluded_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueSnapshot = new Struct(expectedSchema1)
.put("id", 10)
.put("name", "a name");
// Schema after the rename (streamed record)
Schema expectedSchema2 = SchemaBuilder.struct()
.optional()
.name("server1.dbo.excluded_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("first_name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueStreaming = new Struct(expectedSchema2)
.put("id", 11)
.put("first_name", "some_name");
Assertions.assertThat(tableA).hasSize(2);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchema1)
.valueAfterFieldIsEqualTo(expectedValueSnapshot);
SourceRecordAssert.assertThat(tableA.get(1))
.valueAfterFieldSchemaIsEqualTo(expectedSchema2)
.valueAfterFieldIsEqualTo(expectedValueStreaming);
stopConnector();
}
@Test
@FixFor("DBZ-1068")
public void excludeColumnWhenCaptureInstanceExcludesColumns() throws Exception {
// Schema-only snapshot; a streamed insert must not expose the "amount" column
// because the capture instance records only (id, name).
connection.execute(
"CREATE TABLE excluded_column_table_a (id int, name varchar(30), amount integer primary key(id))");
TestHelper.enableTableCdc(connection, "excluded_column_table_a", "dbo_excluded_column_table_a",
Arrays.asList("id", "name"));
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
connection.execute("INSERT INTO excluded_column_table_a VALUES(10, 'some_name', 120)");
final SourceRecords records = consumeRecordsByTopic(1);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.excluded_column_table_a")
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.excluded_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueA);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void excludeColumnWhenCaptureInstanceExcludesColumnInMiddleOfTable() throws Exception {
// The excluded column ("amount") sits between captured columns; both snapshot
// and streaming must still map the remaining columns correctly.
connection.execute(
"CREATE TABLE exclude_list_column_table_a (id int, amount integer, name varchar(30), primary key(id))");
connection.execute("INSERT INTO exclude_list_column_table_a VALUES(10, 100, 'a name')");
TestHelper.enableTableCdc(connection, "exclude_list_column_table_a", "dbo_exclude_list_column_table_a",
Arrays.asList("id", "name"));
final Configuration config = TestHelper.defaultConfig()
.with(RelationalDatabaseConnectorConfig.TABLE_INCLUDE_LIST, ".*exclude_list_column_table_a")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO exclude_list_column_table_a VALUES(11, 120, 'some_name')");
TestHelper.waitForCdcRecord(connection, "exclude_list_column_table_a", rs -> rs.getInt("id") == 11);
final SourceRecords records = consumeRecordsByTopic(2);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.exclude_list_column_table_a");
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.exclude_list_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValue1 = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "a name");
Struct expectedValue2 = new Struct(expectedSchemaA)
.put("id", 11)
.put("name", "some_name");
// record 0 = snapshot row, record 1 = streamed insert
Assertions.assertThat(tableA).hasSize(2);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValue1);
SourceRecordAssert.assertThat(tableA.get(1))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValue2);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void includeColumnsWhenCaptureInstanceExcludesColumnInMiddleOfTable() throws Exception {
// Combines a capture instance that omits "amount" with a connector-side
// column include list; the resulting schema must contain exactly (id, name).
connection.execute(
"CREATE TABLE include_list_column_table_a (id int, amount integer, name varchar(30), primary key(id))");
TestHelper.enableTableCdc(connection, "include_list_column_table_a", "dbo_include_list_column_table_a",
Arrays.asList("id", "name"));
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, "dbo.include_list_column_table_a.id,dbo.include_list_column_table_a.name")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 120, 'some_name')");
TestHelper.waitForCdcRecord(connection, "include_list_column_table_a", rs -> rs.getInt("id") == 10);
final SourceRecords records = consumeRecordsByTopic(1);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.include_list_column_table_a");
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.include_list_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueA);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void excludeMultipleColumnsWhenCaptureInstanceExcludesSingleColumn() throws Exception {
// The capture instance already omits "amount"; the connector-side exclude
// list additionally drops "note", leaving only (id, name) in the event.
connection.execute(
"CREATE TABLE exclude_list_column_table_a (id int, amount integer, note varchar(30), name varchar(30), primary key(id))");
TestHelper.enableTableCdc(connection, "exclude_list_column_table_a", "dbo_exclude_list_column_table_a",
Arrays.asList("id", "note", "name"));
// Exclude the note column on top of the already excluded amount column
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_EXCLUDE_LIST, "dbo.exclude_list_column_table_a.amount,dbo.exclude_list_column_table_a.note")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO exclude_list_column_table_a VALUES(10, 120, 'a note', 'some_name')");
TestHelper.waitForCdcRecord(connection, "exclude_list_column_table_a", rs -> rs.getInt("id") == 10);
final SourceRecords records = consumeRecordsByTopic(1);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.exclude_list_column_table_a");
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.exclude_list_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueA);
stopConnector();
}
@Test
@FixFor("DBZ-2522")
public void includeMultipleColumnsWhenCaptureInstanceExcludesSingleColumn() throws Exception {
// Mirror of the exclude-list variant above, driven via COLUMN_INCLUDE_LIST:
// capture instance omits "amount", include list keeps only (id, name).
connection.execute(
"CREATE TABLE include_list_column_table_a (id int, amount integer, note varchar(30), name varchar(30), primary key(id))");
TestHelper.enableTableCdc(connection, "include_list_column_table_a", "dbo_include_list_column_table_a",
Arrays.asList("id", "note", "name"));
// Exclude the note column on top of the already excluded amount column
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.COLUMN_INCLUDE_LIST, "dbo.include_list_column_table_a.id,dbo.include_list_column_table_a.name")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO include_list_column_table_a VALUES(10, 120, 'a note', 'some_name')");
TestHelper.waitForCdcRecord(connection, "include_list_column_table_a", rs -> rs.getInt("id") == 10);
final SourceRecords records = consumeRecordsByTopic(1);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.include_list_column_table_a");
Schema expectedSchemaA = SchemaBuilder.struct()
.optional()
.name("server1.dbo.include_list_column_table_a.Value")
.field("id", Schema.INT32_SCHEMA)
.field("name", Schema.OPTIONAL_STRING_SCHEMA)
.build();
Struct expectedValueA = new Struct(expectedSchemaA)
.put("id", 10)
.put("name", "some_name");
Assertions.assertThat(tableA).hasSize(1);
SourceRecordAssert.assertThat(tableA.get(0))
.valueAfterFieldSchemaIsEqualTo(expectedSchemaA)
.valueAfterFieldIsEqualTo(expectedValueA);
stopConnector();
}
/**
* Passes the "applicationName" driver property and asserts it is visible in the
* connected sessions via {@code sys.dm_exec_sessions}.
*/
@Test
@FixFor("DBZ-964")
public void shouldPropagateDatabaseDriverProperties() throws Exception {
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with("database.applicationName", "Debezium App DBZ-964")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// consuming one record to make sure the connector establishes the DB connection which happens asynchronously
// after the start() call
connection.execute("INSERT INTO tablea VALUES(964, 'a')");
consumeRecordsByTopic(1);
connection.query("select count(1) from sys.dm_exec_sessions where program_name = 'Debezium App DBZ-964'", rs -> {
rs.next();
assertThat(rs.getInt(1)).isGreaterThanOrEqualTo(1);
});
// NOTE(review): no stopConnector() here — presumably teardown is handled in an
// @After method outside this view; confirm before relying on it.
}
/**
* Scenario driver for the DBZ-1128 restart tests: stops the connector in the
* middle of consuming a committed multi-row transaction and verifies that, after
* restart, streaming resumes at the correct position with no lost or duplicated
* events.
*
* @param restartJustAfterSnapshot when true, stop/restart once right after the
*        initial snapshot and insert an extra row while the connector is down
* @param afterStreaming when true, verify one streamed record before the main
*        transaction is written
*/
private void restartInTheMiddleOfTx(boolean restartJustAfterSnapshot, boolean afterStreaming) throws Exception {
final int RECORDS_PER_TABLE = 30;
final int TABLES = 2;
final int ID_START = 10;
final int ID_RESTART = 1000;
// id of the tablea record on which the connector is forced to stop (mid-batch)
final int HALF_ID = ID_START + RECORDS_PER_TABLE / 2;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.build();
if (restartJustAfterSnapshot) {
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot to be completed
consumeRecordsByTopic(1);
stopConnector();
// Row written while the connector is down; must be picked up after restart
connection.execute("INSERT INTO tablea VALUES(-1, '-a')");
}
// The predicate marks the stop record: the engine halts when the tablea event
// with id == HALF_ID is reached, simulating a crash mid-transaction.
start(SqlServerConnector.class, config, record -> {
if (!"server1.dbo.tablea.Envelope".equals(record.valueSchema().name())) {
return false;
}
final Struct envelope = (Struct) record.value();
final Struct after = envelope.getStruct("after");
final Integer id = after.getInt32("id");
final String value = after.getString("cola");
return id != null && id == HALF_ID && "a".equals(value);
});
assertConnectorIsRunning();
// Wait for snapshot to be completed or a first streaming message delivered
consumeRecordsByTopic(1);
if (afterStreaming) {
connection.execute("INSERT INTO tablea VALUES(-2, '-a')");
final SourceRecords records = consumeRecordsByTopic(1);
final List<SchemaAndValueField> expectedRow = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, -2),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "-a"));
assertRecord(((Struct) records.allRecordsInOrder().get(0).value()).getStruct(Envelope.FieldName.AFTER), expectedRow);
}
// Write all 2 * RECORDS_PER_TABLE rows as a single committed transaction
connection.setAutoCommit(false);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.executeWithoutCommitting(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.executeWithoutCommitting(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
connection.connection().commit();
TestHelper.waitForCdcRecord(connection, "tablea", rs -> rs.getInt("id") == (ID_START + RECORDS_PER_TABLE - 1));
TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == (ID_START + RECORDS_PER_TABLE - 1));
// Only the first half of the transaction is consumed before the stop record:
// 15 (a,b) pairs, so the last consumed event is tableb with id HALF_ID - 1.
List<SourceRecord> records = consumeRecordsByTopic(RECORDS_PER_TABLE).allRecordsInOrder();
assertThat(records).hasSize(RECORDS_PER_TABLE);
SourceRecord lastRecordForOffset = records.get(RECORDS_PER_TABLE - 1);
Struct value = (Struct) lastRecordForOffset.value();
final List<SchemaAndValueField> expectedLastRow = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, HALF_ID - 1),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
assertRecord((Struct) value.get("after"), expectedLastRow);
stopConnector();
// Restart and verify the second half of the transaction is delivered exactly once
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
SourceRecords sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE);
records = sourceRecords.allRecordsInOrder();
assertThat(records).hasSize(RECORDS_PER_TABLE);
List<SourceRecord> tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
List<SourceRecord> tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");
for (int i = 0; i < RECORDS_PER_TABLE / 2; i++) {
final int id = HALF_ID + i;
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final Struct valueA = (Struct) recordA.value();
assertRecord((Struct) valueA.get("after"), expectedRowA);
assertNull(valueA.get("before"));
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
// Post-restart sanity batch: fresh rows (committed per iteration) must stream normally
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_RESTART + i;
connection.executeWithoutCommitting(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.executeWithoutCommitting(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
connection.connection().commit();
}
TestHelper.waitForCdcRecord(connection, "tablea", rs -> rs.getInt("id") == (ID_RESTART + RECORDS_PER_TABLE - 1));
TestHelper.waitForCdcRecord(connection, "tableb", rs -> rs.getInt("id") == (ID_RESTART + RECORDS_PER_TABLE - 1));
sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
tableA = sourceRecords.recordsForTopic("server1.dbo.tablea");
tableB = sourceRecords.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = i + ID_RESTART;
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowA = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("cola", Schema.OPTIONAL_STRING_SCHEMA, "a"));
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, id),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
final Struct valueA = (Struct) recordA.value();
assertRecord((Struct) valueA.get("after"), expectedRowA);
assertNull(valueA.get("before"));
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
}
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTxAfterSnapshot() throws Exception {
// Variant: restart once right after the snapshot completes.
restartInTheMiddleOfTx(true, false);
}
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTxAfterCompletedTx() throws Exception {
// Variant: verify one streamed record before starting the mid-tx restart.
restartInTheMiddleOfTx(false, true);
}
@Test
@FixFor("DBZ-1128")
public void restartInTheMiddleOfTx() throws Exception {
// Base variant: no extra restart and no pre-verified streamed record.
restartInTheMiddleOfTx(false, false);
}
@Test
@FixFor("DBZ-1242")
public void testEmptySchemaWarningAfterApplyingFilters() throws Exception {
// This captures all logged messages, allowing us to verify log message was written.
final LogInterceptor logInterceptor = new LogInterceptor();
// The include list matches no existing table, so the connector must warn that
// no data collections are captured.
Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "my_products")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForAvailableRecords(100, TimeUnit.MILLISECONDS);
stopConnector(value -> assertThat(logInterceptor.containsWarnMessage(DatabaseSchema.NO_CAPTURED_DATA_COLLECTIONS_WARNING)).isTrue());
}
@Test
@FixFor("DBZ-1242")
public void testNoEmptySchemaWarningAfterApplyingFilters() throws Exception {
// This captures all logged messages, allowing us to verify log message was written.
final LogInterceptor logInterceptor = new LogInterceptor();
// Negative counterpart of the test above: with no filters applied, the
// "no captured data collections" warning must NOT be logged.
Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForAvailableRecords(100, TimeUnit.MILLISECONDS);
stopConnector(value -> assertThat(logInterceptor.containsWarnMessage(DatabaseSchema.NO_CAPTURED_DATA_COLLECTIONS_WARNING)).isFalse());
}
@Test
@FixFor("DBZ-916")
public void keylessTable() throws Exception {
// A table without a primary key must produce records with null key and null
// key schema for snapshot, insert, update and delete events alike.
connection.execute(
"CREATE TABLE keyless (id int, name varchar(30))",
"INSERT INTO keyless VALUES(1, 'k')");
TestHelper.enableTableCdc(connection, "keyless");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.keyless")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Expected before/after row images for the update event (id 2 -> 3).
// (The previously declared "key" list for id 1 was never used and has been removed.)
final List<SchemaAndValueField> key2 = Arrays.asList(
new SchemaAndValueField("id", Schema.OPTIONAL_INT32_SCHEMA, 2),
new SchemaAndValueField("name", Schema.OPTIONAL_STRING_SCHEMA, "k"));
final List<SchemaAndValueField> key3 = Arrays.asList(
new SchemaAndValueField("id", Schema.OPTIONAL_INT32_SCHEMA, 3),
new SchemaAndValueField("name", Schema.OPTIONAL_STRING_SCHEMA, "k"));
// Wait for snapshot completion
SourceRecords records = consumeRecordsByTopic(1);
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
connection.execute(
"INSERT INTO keyless VALUES(2, 'k')");
records = consumeRecordsByTopic(1);
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
// BUGFIX: this previously repeated the key() assertion; the intent (matching the
// snapshot and delete checks in this test) is to verify the key schema as well.
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
connection.execute(
"UPDATE keyless SET id=3 WHERE ID=2");
records = consumeRecordsByTopic(3);
final SourceRecord update1 = records.recordsForTopic("server1.dbo.keyless").get(0);
assertThat(update1.key()).isNull();
assertThat(update1.keySchema()).isNull();
assertRecord(((Struct) update1.value()).getStruct(Envelope.FieldName.BEFORE), key2);
assertRecord(((Struct) update1.value()).getStruct(Envelope.FieldName.AFTER), key3);
connection.execute(
"DELETE FROM keyless WHERE id=3");
// Delete yields the delete event plus a tombstone (null value) record
records = consumeRecordsByTopic(2, false);
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).key()).isNull();
assertThat(records.recordsForTopic("server1.dbo.keyless").get(0).keySchema()).isNull();
assertNull(records.recordsForTopic("server1.dbo.keyless").get(1).value());
stopConnector();
}
@Test
@FixFor("DBZ-1015")
public void shouldRewriteIdentityKey() throws InterruptedException, SQLException {
// message.key.columns turns the keyless table's null key into a key built
// from the "id" column.
connection.execute(
"CREATE TABLE keyless (id int, name varchar(30))",
"INSERT INTO keyless VALUES(1, 'k')");
TestHelper.enableTableCdc(connection, "keyless");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.keyless")
// rewrite key from table 'products': from {null} to {id}
.with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "(.*).keyless:id")
.build();
start(SqlServerConnector.class, config);
SourceRecords records = consumeRecordsByTopic(1);
List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.keyless");
assertThat(recordsForTopic.get(0).key()).isNotNull();
Struct key = (Struct) recordsForTopic.get(0).key();
Assertions.assertThat(key.get("id")).isNotNull();
stopConnector();
}
@Test
@FixFor("DBZ-1491")
public void shouldCaptureTableSchema() throws SQLException, InterruptedException {
// Verifies key/value schema capture including column defaults: a composite
// primary key, a datetimeoffset default, and a varchar default.
connection.execute(
"CREATE TABLE table_schema_test (key_cola int not null,"
+ "key_colb varchar(10) not null,"
+ "cola int not null,"
+ "colb datetimeoffset not null default ('2019-01-01 12:34:56.1234567+04:00'),"
+ "colc varchar(20) default ('default_value'),"
+ "cold float,"
+ "primary key(key_cola, key_colb))");
TestHelper.enableTableCdc(connection, "table_schema_test");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
TestHelper.waitForSnapshotToBeCompleted();
connection.execute(
"INSERT INTO table_schema_test (key_cola, key_colb, cola, colb, colc, cold) VALUES(1, 'a', 100, '2019-01-01 10:20:39.1234567 +02:00', 'some_value', 100.20)");
List<SourceRecord> records = consumeRecordsByTopic(1).recordsForTopic("server1.dbo.table_schema_test");
assertThat(records).hasSize(1);
SourceRecordAssert.assertThat(records.get(0))
.keySchemaIsEqualTo(SchemaBuilder.struct()
.name("server1.dbo.table_schema_test.Key")
.field("key_cola", Schema.INT32_SCHEMA)
.field("key_colb", Schema.STRING_SCHEMA)
.build())
.valueAfterFieldSchemaIsEqualTo(SchemaBuilder.struct()
.optional()
.name("server1.dbo.table_schema_test.Value")
.field("key_cola", Schema.INT32_SCHEMA)
.field("key_colb", Schema.STRING_SCHEMA)
.field("cola", Schema.INT32_SCHEMA)
// datetimeoffset maps to io.debezium.time.ZonedTimestamp with the DDL default preserved
.field("colb",
SchemaBuilder.string().name("io.debezium.time.ZonedTimestamp").required().defaultValue("2019-01-01T12:34:56.1234567+04:00").version(1)
.build())
.field("colc", SchemaBuilder.string().optional().defaultValue("default_value").build())
.field("cold", Schema.OPTIONAL_FLOAT64_SCHEMA)
.build());
stopConnector();
}
@Test
@FixFor("DBZ-1923")
public void shouldDetectPurgedHistory() throws Exception {
// After deleting the database history file, a restart must fail fast with a
// clear "history missing" error instead of silently re-snapshotting.
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final int ID_RESTART = 100;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.DATABASE_HISTORY, PurgableFileDatabaseHistory.class)
.build();
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute("INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute("INSERT INTO tableb VALUES(" + id + ", 'b')");
}
Awaitility.await().atMost(30, TimeUnit.SECONDS).pollInterval(100, TimeUnit.MILLISECONDS).until(() -> {
Testing.debug("Waiting for initial changes to be propagated to CDC structures");
return connection.getMaxLsn().isAvailable();
});
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Skip the schema-change record, then verify the snapshot offset flags:
// every record is marked snapshot=true, only the last has snapshot_completed=true
List<SourceRecord> records = consumeRecordsByTopic(1 + RECORDS_PER_TABLE * TABLES).allRecordsInOrder();
records = records.subList(1, records.size());
for (Iterator<SourceRecord> it = records.iterator(); it.hasNext();) {
SourceRecord record = it.next();
assertThat(record.sourceOffset().get("snapshot")).as("Snapshot phase").isEqualTo(true);
if (it.hasNext()) {
assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot in progress").isEqualTo(false);
}
else {
assertThat(record.sourceOffset().get("snapshot_completed")).as("Snapshot completed").isEqualTo(true);
}
}
stopConnector();
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_RESTART + i;
connection.execute("INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute("INSERT INTO tableb VALUES(" + id + ", 'b')");
}
// Purge the history, then expect the restart to abort with the dedicated error
Testing.Files.delete(TestHelper.DB_HISTORY_PATH);
final LogInterceptor logInterceptor = new LogInterceptor();
start(SqlServerConnector.class, config);
assertConnectorNotRunning();
assertThat(logInterceptor.containsStacktraceElement(
"The db history topic or its content is fully or partially missing. Please check database history topic configuration and re-execute the snapshot."))
.isTrue();
}
@Test
@FixFor("DBZ-1988")
public void shouldHonorSourceTimestampMode() throws InterruptedException, SQLException {
// With source.timestamp.mode=processing, source.ts_ms should be the connector's
// processing time and therefore close to the envelope ts_ms.
connection.execute("CREATE TABLE source_timestamp_mode (id int, name varchar(30) primary key(id))");
TestHelper.enableTableCdc(connection, "source_timestamp_mode");
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.source_timestamp_mode")
.with(SqlServerConnectorConfig.SOURCE_TIMESTAMP_MODE, "processing")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
waitForSnapshotToBeCompleted("sql_server", "server1");
connection.execute("INSERT INTO source_timestamp_mode VALUES(1, 'abc')");
SourceRecords records = consumeRecordsByTopic(1);
List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.source_timestamp_mode");
SourceRecord record = recordsForTopic.get(0);
long eventTs = (long) ((Struct) record.value()).get("ts_ms");
long sourceTs = (long) ((Struct) ((Struct) record.value()).get("source")).get("ts_ms");
// it's not exactly the same as ts_ms, but close enough;
// NOTE(review): the 100 ms bound is timing-dependent and could be flaky on a
// heavily loaded CI host — consider widening if this test proves unstable.
assertThat(eventTs - sourceTs).isLessThan(100);
stopConnector();
}
@Test
@FixFor("DBZ-1312")
public void useShortTableNamesForColumnMapper() throws Exception {
// Column mapper addressed with the short schema.table.column form
// ("dbo.tablea.cola") must still match and mask the column.
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with("column.mask.with.4.chars", "dbo.tablea.cola")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
// tablea.cola is masked; tableb is untouched and serves as the control
final Struct valueA = (Struct) recordA.value();
Assertions.assertThat(valueA.getStruct("after").getString("cola")).isEqualTo("****");
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
stopConnector();
}
@Test
@FixFor("DBZ-1312")
// Verifies that a column-mask mapping keyed by the fully-qualified
// "database.schema.table.column" form ("testDB.dbo.tablea.cola") is
// resolved and applied.
public void useLongTableNamesForColumnMapper() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with("column.mask.with.4.chars", "testDB.dbo.tablea.cola")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
// Insert RECORDS_PER_TABLE rows into each of the two captured tables.
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
// Mapped column: every value of tablea.cola must be masked as "****".
final Struct valueA = (Struct) recordA.value();
Assertions.assertThat(valueA.getStruct("after").getString("cola")).isEqualTo("****");
// tableb is unaffected by the mapper and must round-trip unchanged.
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
stopConnector();
}
@Test
@FixFor("DBZ-1312")
// Verifies that a message-key mapping keyed by the fully-qualified
// "database.schema.table" form ("testDB.dbo.tablea") is resolved, so the
// record key is built from "cola" instead of the primary key.
public void useLongTableNamesForKeyMapper() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
.with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "testDB.dbo.tablea:cola")
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
// Insert RECORDS_PER_TABLE rows into each of the two captured tables.
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final SourceRecord recordA = tableA.get(i);
final SourceRecord recordB = tableB.get(i);
final List<SchemaAndValueField> expectedRowB = Arrays.asList(
new SchemaAndValueField("id", Schema.INT32_SCHEMA, i + ID_START),
new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
// The remapped key must contain the "cola" field with the inserted value.
final Struct keyA = (Struct) recordA.key();
Assertions.assertThat(keyA.getString("cola")).isEqualTo("a");
// tableb is unaffected by the key mapper and must round-trip unchanged.
final Struct valueB = (Struct) recordB.value();
assertRecord((Struct) valueB.get("after"), expectedRowB);
assertNull(valueB.get("before"));
}
stopConnector();
}
@Test
@FixFor("DBZ-1312")
public void useShortTableNamesForKeyMapper() throws Exception {
    final int RECORDS_PER_TABLE = 5;
    final int TABLES = 2;
    final int ID_START = 10;
    // Remap the message key of tablea to "cola" via a schema-qualified
    // (short) table name.
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL)
            .with(SqlServerConnectorConfig.MSG_KEY_COLUMNS, "dbo.tablea:cola")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Wait for snapshot completion
    consumeRecordsByTopic(1);
    for (int row = 0; row < RECORDS_PER_TABLE; row++) {
        final int key = ID_START + row;
        connection.execute("INSERT INTO tablea VALUES(" + key + ", 'a')");
        connection.execute("INSERT INTO tableb VALUES(" + key + ", 'b')");
    }
    final SourceRecords sourceRecords = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
    final List<SourceRecord> recordsA = sourceRecords.recordsForTopic("server1.dbo.tablea");
    final List<SourceRecord> recordsB = sourceRecords.recordsForTopic("server1.dbo.tableb");
    Assertions.assertThat(recordsA).hasSize(RECORDS_PER_TABLE);
    Assertions.assertThat(recordsB).hasSize(RECORDS_PER_TABLE);
    for (int row = 0; row < RECORDS_PER_TABLE; row++) {
        // The remapped key must carry "cola" with the inserted value.
        final Struct remappedKey = (Struct) recordsA.get(row).key();
        Assertions.assertThat(remappedKey.getString("cola")).isEqualTo("a");
        // tableb is unaffected by the key mapper and must round-trip unchanged.
        final List<SchemaAndValueField> expectedRowB = Arrays.asList(
                new SchemaAndValueField("id", Schema.INT32_SCHEMA, row + ID_START),
                new SchemaAndValueField("colb", Schema.OPTIONAL_STRING_SCHEMA, "b"));
        final Struct valueB = (Struct) recordsB.get(row).value();
        assertRecord((Struct) valueB.get("after"), expectedRowB);
        assertNull(valueB.get("before"));
    }
    stopConnector();
}
@Test
@FixFor({ "DBZ-1916", "DBZ-1830" })
public void shouldPropagateSourceTypeByDatatype() throws Exception {
    // Verifies that datatype.propagate.source.type attaches the source column
    // type name/length/scale as schema parameters for the matched types only.
    // NOTE: the table-level PRIMARY KEY constraint must be separated from the
    // last column definition by a comma to be valid T-SQL.
    connection.execute("CREATE TABLE dt_table (id int, c1 int, c2 int, c3a numeric(5,2), c3b varchar(128), f1 float(10), f2 decimal(8,4), primary key(id))");
    TestHelper.enableTableCdc(connection, "dt_table");
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
            .with(SqlServerConnectorConfig.TABLE_INCLUDE_LIST, "dbo.dt_table")
            .with("datatype.propagate.source.type", ".+\\.NUMERIC,.+\\.VARCHAR,.+\\.REAL,.+\\.DECIMAL")
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    waitForSnapshotToBeCompleted("sql_server", "server1");
    connection.execute("INSERT INTO dt_table (id,c1,c2,c3a,c3b,f1,f2) values (1, 123, 456, 789.01, 'test', 1.228, 234.56)");
    SourceRecords records = consumeRecordsByTopic(1);
    List<SourceRecord> recordsForTopic = records.recordsForTopic("server1.dbo.dt_table");
    final SourceRecord record = recordsForTopic.get(0);
    final Field before = record.valueSchema().field("before");
    // int columns are not matched by the propagation patterns -> no parameters.
    assertThat(before.schema().field("id").schema().parameters()).isNull();
    assertThat(before.schema().field("c1").schema().parameters()).isNull();
    assertThat(before.schema().field("c2").schema().parameters()).isNull();
    assertThat(before.schema().field("c3a").schema().parameters()).includes(
            entry(TYPE_NAME_PARAMETER_KEY, "NUMERIC"),
            entry(TYPE_LENGTH_PARAMETER_KEY, "5"),
            entry(TYPE_SCALE_PARAMETER_KEY, "2"));
    assertThat(before.schema().field("c3b").schema().parameters()).includes(
            entry(TYPE_NAME_PARAMETER_KEY, "VARCHAR"),
            entry(TYPE_LENGTH_PARAMETER_KEY, "128"));
    assertThat(before.schema().field("f2").schema().parameters()).includes(
            entry(TYPE_NAME_PARAMETER_KEY, "DECIMAL"),
            entry(TYPE_LENGTH_PARAMETER_KEY, "8"),
            entry(TYPE_SCALE_PARAMETER_KEY, "4"));
    // float(10) is stored by SQL Server as REAL with binary precision 24.
    assertThat(before.schema().field("f1").schema().parameters()).includes(
            entry(TYPE_NAME_PARAMETER_KEY, "REAL"),
            entry(TYPE_LENGTH_PARAMETER_KEY, "24"));
    stopConnector();
}
@Test
@FixFor("DBZ-2379")
public void shouldNotStreamWhenUsingSnapshotModeInitialOnly() throws Exception {
    final LogInterceptor logInterceptor = new LogInterceptor();
    final Configuration config = TestHelper.defaultConfig()
            .with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL_ONLY)
            .build();
    start(SqlServerConnector.class, config);
    assertConnectorIsRunning();
    // Drain the snapshot output.
    consumeRecordsByTopic(1);
    // With INITIAL_ONLY nothing may be streamed afterwards.
    assertNoRecordsToConsume();
    final String expectedWarning = "Streaming is not enabled in current configuration";
    stopConnector(value -> assertThat(logInterceptor.containsMessage(expectedWarning)).isTrue());
}
@Test
@FixFor("DBZ-2582")
// Streams changes with the max-LSN query optimization left at its default
// setting and verifies all inserted rows are still captured.
public void testMaxLsnSelectStatementWithDefault() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
// Insert RECORDS_PER_TABLE rows into each of the two captured tables.
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
// Every insert must be streamed to its table topic.
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
stopConnector();
}
@Test
@FixFor("DBZ-2582")
// Same scenario as testMaxLsnSelectStatementWithDefault, but with the
// max-LSN query optimization explicitly disabled; capture must be unaffected.
public void testMaxLsnSelectStatementWithFalse() throws Exception {
final int RECORDS_PER_TABLE = 5;
final int TABLES = 2;
final int ID_START = 10;
final Configuration config = TestHelper.defaultConfig()
.with(SqlServerConnectorConfig.SNAPSHOT_MODE, SnapshotMode.SCHEMA_ONLY)
.with(SqlServerConnectorConfig.MAX_LSN_OPTIMIZATION, false)
.build();
start(SqlServerConnector.class, config);
assertConnectorIsRunning();
// Wait for snapshot completion
consumeRecordsByTopic(1);
// Insert RECORDS_PER_TABLE rows into each of the two captured tables.
for (int i = 0; i < RECORDS_PER_TABLE; i++) {
final int id = ID_START + i;
connection.execute(
"INSERT INTO tablea VALUES(" + id + ", 'a')");
connection.execute(
"INSERT INTO tableb VALUES(" + id + ", 'b')");
}
// Every insert must be streamed to its table topic.
final SourceRecords records = consumeRecordsByTopic(RECORDS_PER_TABLE * TABLES);
final List<SourceRecord> tableA = records.recordsForTopic("server1.dbo.tablea");
final List<SourceRecord> tableB = records.recordsForTopic("server1.dbo.tableb");
Assertions.assertThat(tableA).hasSize(RECORDS_PER_TABLE);
Assertions.assertThat(tableB).hasSize(RECORDS_PER_TABLE);
stopConnector();
}
// Asserts that every expected field is present in the record with the
// expected schema and value.
private void assertRecord(Struct record, List<SchemaAndValueField> expected) {
    for (SchemaAndValueField field : expected) {
        field.assertFor(record);
    }
}
/**
 * File-backed {@link DatabaseHistory} used by tests: delegates everything to a
 * {@link FileDatabaseHistory}, but {@link #exists()} reports {@code true} only
 * when the backing file is present AND non-empty, so tests can simulate a
 * purged/empty history topic.
 */
public static class PurgableFileDatabaseHistory implements DatabaseHistory {

    final DatabaseHistory delegate = new FileDatabaseHistory();

    @Override
    public boolean exists() {
        try {
            // An empty history file counts as "missing" history.
            return storageExists() && java.nio.file.Files.size(TestHelper.DB_HISTORY_PATH) > 0;
        }
        catch (IOException e) {
            // Preserve the underlying cause instead of discarding it.
            throw new DatabaseHistoryException("File should exist", e);
        }
    }

    @Override
    public void configure(Configuration config, HistoryRecordComparator comparator,
                          DatabaseHistoryListener listener, boolean useCatalogBeforeSchema) {
        delegate.configure(config, comparator, listener, useCatalogBeforeSchema);
    }

    @Override
    public void start() {
        delegate.start();
    }

    @Override
    public void record(Map<String, ?> source, Map<String, ?> position, String databaseName, String ddl)
            throws DatabaseHistoryException {
        delegate.record(source, position, databaseName, ddl);
    }

    @Override
    public void record(Map<String, ?> source, Map<String, ?> position, String databaseName, String schemaName,
                       String ddl, TableChanges changes)
            throws DatabaseHistoryException {
        delegate.record(source, position, databaseName, schemaName, ddl, changes);
    }

    @Override
    public void recover(Map<String, ?> source, Map<String, ?> position, Tables schema, DdlParser ddlParser) {
        delegate.recover(source, position, schema, ddlParser);
    }

    @Override
    public void stop() {
        delegate.stop();
    }

    @Override
    public boolean storageExists() {
        return delegate.storageExists();
    }

    @Override
    public void initializeStorage() {
        delegate.initializeStorage();
    }
}
}
|
DBZ-2747 Disable unstable corner case test
|
debezium-connector-sqlserver/src/test/java/io/debezium/connector/sqlserver/SqlServerConnectorIT.java
|
DBZ-2747 Disable unstable corner case test
|
|
Java
|
apache-2.0
|
dc1c081ae803671bf827474b961717366a2a00b5
| 0
|
watchrabbit/rabbit-crawler
|
/*
* Copyright 2015 Mariusz.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.watchrabbit.crawler.executor.service;
import com.watchrabbit.commons.clock.Stopwatch;
import com.watchrabbit.crawler.api.CrawlForm;
import com.watchrabbit.crawler.api.CrawlResult;
import com.watchrabbit.crawler.api.LinkDto;
import com.watchrabbit.crawler.driver.factory.RemoteWebDriverFactory;
import com.watchrabbit.crawler.driver.service.LoaderService;
import com.watchrabbit.crawler.executor.facade.AuthServiceFacade;
import com.watchrabbit.crawler.executor.facade.ManagerServiceFacade;
import com.watchrabbit.crawler.executor.listener.CrawlListener;
import com.watchrabbit.crawler.executor.strategy.KeywordGenerateStrategy;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static java.util.stream.Collectors.toList;
import org.apache.commons.lang.StringUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* @author Mariusz
*/
/**
 * Default {@link CrawlExecutorService} implementation: loads a page with a
 * remote Selenium driver, optionally replays an authenticated session and a
 * gateway keyword search, harvests outgoing links and reports the crawl
 * result back to the manager service.
 */
@Service
public class CrawlExecutorServiceImpl implements CrawlExecutorService {

    private static final Logger LOGGER = LoggerFactory.getLogger(CrawlExecutorServiceImpl.class);

    @Autowired
    AuthServiceFacade authServiceFacade;

    @Autowired
    RemoteWebDriverFactory remoteWebDriverFactory;

    @Autowired
    ManagerServiceFacade managerServiceFacade;

    @Autowired
    LoaderService loaderService;

    @Autowired
    KeywordGenerateStrategy keywordGenerateStrategy;

    // Optional listener; defaults to a no-op reporting importance 0.
    @Autowired(required = false)
    CrawlListener crawlListener = driver -> 0;

    /**
     * Crawls the page described by {@code form} and pushes the outcome
     * (links, timing, importance factor) to the manager service. On any
     * failure the manager is notified via {@code onError}; the web driver is
     * always returned to the factory pool.
     */
    @Override
    public void processPage(CrawlForm form) {
        Collection<Cookie> session = authServiceFacade.getSession(form.getDomain());
        RemoteWebDriver driver = remoteWebDriverFactory.produceDriver();
        try {
            Stopwatch stopwatch = Stopwatch.createStarted(() -> enableSession(driver, form, session));
            LOGGER.debug("Finished loading {} in {}", form.getUrl(), stopwatch.getExecutionTime(TimeUnit.MILLISECONDS));
            List<LinkDto> links = collectLinks(driver).stream()
                    .map(link -> new LinkDto.Builder()
                            .withUrl(link)
                            .build()
                    ).collect(toList());
            if (form.isGateway()) {
                LOGGER.debug("Processing gateway {}", form.getUrl());
                // Gateway pages additionally produce keyword-search links
                // pointing back at the gateway URL itself.
                List<String> keywords = keywordGenerateStrategy.generateKeywords(form, driver);
                links.addAll(
                        keywords.stream()
                                .map(keyword -> new LinkDto.Builder()
                                        .withKeyword(keyword)
                                        .withUrl(form.getUrl())
                                        .build()
                                ).collect(toList())
                );
            }
            double importanceFactor = crawlListener.accept(driver);
            managerServiceFacade.consumeResult(new CrawlResult.Builder()
                    .withDomain(form.getDomain())
                    .withMiliseconds(stopwatch.getExecutionTime(TimeUnit.MILLISECONDS))
                    .withUrl(form.getUrl())
                    .withLinks(links)
                    .withId(form.getId())
                    .withImportanceFactor(importanceFactor)
                    .build()
            );
        } catch (Exception ex) {
            // Fixed typo in the log message ("Execption" -> "Exception").
            LOGGER.error("Exception on processing page " + form.getUrl(), ex);
            managerServiceFacade.onError(form);
        } finally {
            remoteWebDriverFactory.returnWebDriver(driver);
        }
    }

    /**
     * Navigates to the form URL, re-applies the stored session cookies (when
     * present) and, for keyword crawls, performs the search by filling and
     * submitting the page's single visible search form.
     */
    private void enableSession(RemoteWebDriver driver, CrawlForm form, Collection<Cookie> session) {
        driver.get(form.getUrl());
        loaderService.waitFor(driver);
        if (!session.isEmpty()) {
            // Cookies can only be set for the current domain, so the page is
            // loaded first, then reloaded with the session applied.
            driver.manage().deleteAllCookies();
            session.forEach(driver.manage()::addCookie);
            driver.get(form.getUrl());
            loaderService.waitFor(driver);
        }
        if (StringUtils.isNotEmpty(form.getKeyword())) {
            Optional<SearchForm> searchFormOptional = findSearchInput(driver);
            searchFormOptional.ifPresent(searchForm -> {
                searchForm.input.sendKeys(form.getKeyword());
                loaderService.waitFor(driver);
                searchForm.submit.click();
                loaderService.waitFor(driver);
            });
        }
    }

    /**
     * Returns the distinct http(s) targets of all visible anchors on the page.
     */
    private List<String> collectLinks(RemoteWebDriver driver) {
        return driver.findElements(By.xpath("//a")).stream()
                .filter(element -> element.isDisplayed())
                .map(link -> link.getAttribute("href"))
                .filter(link -> link != null)
                .filter(link -> link.startsWith("http"))
                .distinct()
                .collect(toList());
    }

    /**
     * Heuristically locates a search form: the first form with exactly one
     * visible text input and exactly one submit button/input. Returns empty
     * when no such form exists.
     */
    private Optional<SearchForm> findSearchInput(RemoteWebDriver driver) {
        for (WebElement form : driver.findElements(By.xpath("//form"))) {
            LOGGER.debug("Looking to form with action {}", form.getAttribute("action"));
            List<WebElement> inputs = form.findElements(By.xpath(".//input")).stream()
                    .filter(input -> input.getAttribute("type").equals("text"))
                    .filter(input -> input.isDisplayed())
                    .collect(toList());
            if (inputs.size() == 1) {
                List<WebElement> submit = form.findElements(By.xpath(".//button[@type='submit']"));
                if (submit.isEmpty()) {
                    submit = form.findElements(By.xpath(".//input[@type='submit']"));
                }
                if (submit.size() == 1) {
                    return Optional.of(new SearchForm(inputs.get(0), submit.get(0)));
                }
            }
        }
        LOGGER.error("Cannot find form in gateway page");
        return Optional.<SearchForm>empty();
    }

    /**
     * Pair of the text input and submit control of a detected search form.
     */
    private class SearchForm {

        WebElement input;
        WebElement submit;

        public SearchForm(WebElement input, WebElement submit) {
            this.input = input;
            this.submit = submit;
        }
    }
}
|
crawler-executor/src/main/java/com/watchrabbit/crawler/executor/service/CrawlExecutorServiceImpl.java
|
/*
* Copyright 2015 Mariusz.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.watchrabbit.crawler.executor.service;
import com.watchrabbit.commons.clock.Stopwatch;
import com.watchrabbit.crawler.api.CrawlForm;
import com.watchrabbit.crawler.api.CrawlResult;
import com.watchrabbit.crawler.api.LinkDto;
import com.watchrabbit.crawler.driver.factory.RemoteWebDriverFactory;
import com.watchrabbit.crawler.driver.service.LoaderService;
import com.watchrabbit.crawler.executor.facade.AuthServiceFacade;
import com.watchrabbit.crawler.executor.facade.ManagerServiceFacade;
import com.watchrabbit.crawler.executor.listener.CrawlListener;
import com.watchrabbit.crawler.executor.strategy.KeywordGenerateStrategy;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import static java.util.stream.Collectors.toList;
import org.apache.commons.lang.StringUtils;
import org.openqa.selenium.By;
import org.openqa.selenium.Cookie;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* @author Mariusz
*/
/**
 * Default {@link CrawlExecutorService} implementation: loads a page with a
 * remote Selenium driver, optionally replays an authenticated session and a
 * gateway keyword search, harvests outgoing links and reports the crawl
 * result back to the manager service.
 */
@Service
public class CrawlExecutorServiceImpl implements CrawlExecutorService {

    private static final Logger LOGGER = LoggerFactory.getLogger(CrawlExecutorServiceImpl.class);

    @Autowired
    AuthServiceFacade authServiceFacade;

    @Autowired
    RemoteWebDriverFactory remoteWebDriverFactory;

    @Autowired
    ManagerServiceFacade managerServiceFacade;

    @Autowired
    LoaderService loaderService;

    @Autowired
    KeywordGenerateStrategy keywordGenerateStrategy;

    // Optional listener; defaults to a no-op reporting importance 0.
    @Autowired(required = false)
    CrawlListener crawlListener = driver -> 0;

    /**
     * Crawls the page described by {@code form} and pushes the outcome
     * (links, timing, importance factor) to the manager service. On any
     * failure the manager is notified via {@code onError}; the web driver is
     * always returned to the factory pool.
     */
    @Override
    public void processPage(CrawlForm form) {
        Collection<Cookie> session = authServiceFacade.getSession(form.getDomain());
        RemoteWebDriver driver = remoteWebDriverFactory.produceDriver();
        try {
            Stopwatch stopwatch = Stopwatch.createStarted(() -> enableSession(driver, form, session));
            LOGGER.debug("Finished loading {} in {}", form.getUrl(), stopwatch.getExecutionTime(TimeUnit.MILLISECONDS));
            List<LinkDto> links = collectLinks(driver).stream()
                    .map(link -> new LinkDto.Builder()
                            .withUrl(link)
                            .build()
                    ).collect(toList());
            if (form.isGateway()) {
                LOGGER.debug("Processing gateway {}", form.getUrl());
                // Gateway pages additionally produce keyword-search links
                // pointing back at the gateway URL itself.
                List<String> keywords = keywordGenerateStrategy.generateKeywords(form, driver);
                links.addAll(
                        keywords.stream()
                                .map(keyword -> new LinkDto.Builder()
                                        .withKeyword(keyword)
                                        .withUrl(form.getUrl())
                                        .build()
                                ).collect(toList())
                );
            }
            double importanceFactor = crawlListener.accept(driver);
            managerServiceFacade.consumeResult(new CrawlResult.Builder()
                    .withDomain(form.getDomain())
                    .withMiliseconds(stopwatch.getExecutionTime(TimeUnit.MILLISECONDS))
                    .withUrl(form.getUrl())
                    .withLinks(links)
                    .withId(form.getId())
                    .withImportanceFactor(importanceFactor)
                    .build()
            );
        } catch (Exception ex) {
            // Previously the exception was swallowed silently; log it with its
            // stack trace before signalling the error to the manager.
            LOGGER.error("Exception on processing page " + form.getUrl(), ex);
            managerServiceFacade.onError(form);
        } finally {
            remoteWebDriverFactory.returnWebDriver(driver);
        }
    }

    /**
     * Navigates to the form URL, re-applies the stored session cookies (when
     * present) and, for keyword crawls, performs the search by filling and
     * submitting the page's single visible search form.
     */
    private void enableSession(RemoteWebDriver driver, CrawlForm form, Collection<Cookie> session) {
        driver.get(form.getUrl());
        loaderService.waitFor(driver);
        if (!session.isEmpty()) {
            // Cookies can only be set for the current domain, so the page is
            // loaded first, then reloaded with the session applied.
            driver.manage().deleteAllCookies();
            session.forEach(driver.manage()::addCookie);
            driver.get(form.getUrl());
            loaderService.waitFor(driver);
        }
        if (StringUtils.isNotEmpty(form.getKeyword())) {
            Optional<SearchForm> searchFormOptional = findSearchInput(driver);
            searchFormOptional.ifPresent(searchForm -> {
                searchForm.input.sendKeys(form.getKeyword());
                loaderService.waitFor(driver);
                searchForm.submit.click();
                loaderService.waitFor(driver);
            });
        }
    }

    /**
     * Returns the distinct http(s) targets of all visible anchors on the page.
     */
    private List<String> collectLinks(RemoteWebDriver driver) {
        return driver.findElements(By.xpath("//a")).stream()
                .filter(element -> element.isDisplayed())
                .map(link -> link.getAttribute("href"))
                .filter(link -> link != null)
                .filter(link -> link.startsWith("http"))
                .distinct()
                .collect(toList());
    }

    /**
     * Heuristically locates a search form: the first form with exactly one
     * visible text input and exactly one submit button/input. Returns empty
     * when no such form exists.
     */
    private Optional<SearchForm> findSearchInput(RemoteWebDriver driver) {
        for (WebElement form : driver.findElements(By.xpath("//form"))) {
            LOGGER.debug("Looking to form with action {}", form.getAttribute("action"));
            List<WebElement> inputs = form.findElements(By.xpath(".//input")).stream()
                    .filter(input -> input.getAttribute("type").equals("text"))
                    .filter(input -> input.isDisplayed())
                    .collect(toList());
            if (inputs.size() == 1) {
                List<WebElement> submit = form.findElements(By.xpath(".//button[@type='submit']"));
                if (submit.isEmpty()) {
                    submit = form.findElements(By.xpath(".//input[@type='submit']"));
                }
                if (submit.size() == 1) {
                    return Optional.of(new SearchForm(inputs.get(0), submit.get(0)));
                }
            }
        }
        LOGGER.error("Cannot find form in gateway page");
        return Optional.<SearchForm>empty();
    }

    /**
     * Pair of the text input and submit control of a detected search form.
     */
    private class SearchForm {

        WebElement input;
        WebElement submit;

        public SearchForm(WebElement input, WebElement submit) {
            this.input = input;
            this.submit = submit;
        }
    }
}
|
Implemented endpoint
|
crawler-executor/src/main/java/com/watchrabbit/crawler/executor/service/CrawlExecutorServiceImpl.java
|
Implemented endpoint
|
|
Java
|
apache-2.0
|
b2465fe74262fcf11e60d51dbbbf3685d897c27a
| 0
|
olehmberg/winter
|
/*
* Copyright (c) 2017 Data and Web Science Group, University of Mannheim, Germany (http://dws.informatik.uni-mannheim.de/)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package de.uni_mannheim.informatik.dws.winter.webtables.app;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger;
import com.beust.jcommander.Parameter;
import au.com.bytecode.opencsv.CSVWriter;
import de.uni_mannheim.informatik.dws.winter.utils.Executable;
import de.uni_mannheim.informatik.dws.winter.utils.ProgressReporter;
import de.uni_mannheim.informatik.dws.winter.utils.WinterLogManager;
import de.uni_mannheim.informatik.dws.winter.utils.query.Func;
import de.uni_mannheim.informatik.dws.winter.utils.query.Q;
import de.uni_mannheim.informatik.dws.winter.webtables.Table;
import de.uni_mannheim.informatik.dws.winter.webtables.TableColumn;
import de.uni_mannheim.informatik.dws.winter.webtables.TableContext;
import de.uni_mannheim.informatik.dws.winter.webtables.TableRow;
import de.uni_mannheim.informatik.dws.winter.webtables.parsers.CsvTableParser;
import de.uni_mannheim.informatik.dws.winter.webtables.parsers.JsonTableParser;
import de.uni_mannheim.informatik.dws.winter.webtables.preprocessing.TableDisambiguationExtractor;
import de.uni_mannheim.informatik.dws.winter.webtables.preprocessing.TableNumberingExtractor;
import de.uni_mannheim.informatik.dws.winter.webtables.writers.JsonTableWriter;
/**
* @author Oliver Lehmberg (oli@dwslab.de)
*
*/
public class ShowTableData extends Executable {
@Parameter(names = "-d")
private boolean showData = false;
@Parameter(names = "-w")
private int columnWidth = 20;
@Parameter(names = "-keyColumnIndex")
private Integer keyColumnIndex = null;
@Parameter(names = "-convertValues")
private boolean convertValues = false;
@Parameter(names = "-update")
private boolean update = false;
@Parameter(names = "-detectKey")
private boolean detectKey = false;
@Parameter(names = "-listColumnIds")
private boolean listColumnIds;
@Parameter(names = "-header")
private boolean showHeader = false;
@Parameter(names = "-rows")
private int numRows = 0;
@Parameter(names = "-csv")
private boolean createCSV = false;
@Parameter(names = "-dep")
private boolean showDependencyInfo = false;
@Parameter(names = "-prov")
private boolean showProvenanceInfo = false;
@Parameter(names = "-pre")
private boolean applyPreprocessing = false;
private static final Logger logger = WinterLogManager.getLogger();
public static void main(String[] args) throws IOException {
ShowTableData s = new ShowTableData();
if(s.parseCommandLine(ShowTableData.class, args) && s.getParams()!=null) {
s.run();
}
}
public void run() throws IOException {
JsonTableParser p = new JsonTableParser();
JsonTableWriter w = new JsonTableWriter();
// p.setConvertValues(convertValues | detectKey);
CsvTableParser csvP = new CsvTableParser();
// csvP.setConvertValues(convertValues | detectKey);
String[] files = getParams().toArray(new String[getParams().size()]);
File dir = null;
if(files.length==1) {
dir = new File(files[0]);
if(dir.isDirectory()) {
files = dir.list();
} else {
dir = null;
}
}
ProgressReporter prg = new ProgressReporter(files.length, "Processing Tables");
CSVWriter csvW = null;
if(createCSV) {
csvW = new CSVWriter(new OutputStreamWriter(System.out));
}
for(String s : files) {
Table t = null;
File f = new File(s);
if(dir!=null) {
f = new File(dir,s);
}
try {
if(s.endsWith("json")) {
t = p.parseTable(f);
} else if(s.endsWith("csv")) {
t = csvP.parseTable(f);
} else {
logger.error(String.format("Unknown table format '%s' (must be .json or .csv)", f.getName()));
continue;
}
if(applyPreprocessing) {
new TableDisambiguationExtractor().extractDisambiguations(Q.toList(t));
new TableNumberingExtractor().extractNumbering(Q.toList(t));
}
if(convertValues) {
t.convertValues();
}
// update the table if requested
if(detectKey) {
t.identifySubjectColumn(0.3,true);
logger.error(String.format("* Detected Entity-Label Column: %s", t.getSubjectColumn()==null ? "?" : t.getSubjectColumn().getHeader()));
}
if(keyColumnIndex!=null) {
logger.error(String.format("* Setting Entity-Label Column: %s", t.getSchema().get(keyColumnIndex)));
t.setSubjectColumnIndex(keyColumnIndex);
}
if(update) {
w.write(t, f);
}
if(createCSV) {
// create a csv file with the table meta data
csvW.writeNext(new String[] {
s,
Integer.toString(t.getRows().size()),
Integer.toString(t.getColumns().size()),
t.getContext()==null ? "" : t.getContext().getUrl(),
t.getContext()==null ? "" : t.getContext().getPageTitle(),
t.getContext()==null ? "" : t.getContext().getTableTitle(),
Integer.toString(getOriginalTables(t).size()),
t.getSubjectColumn()==null ? "" : Integer.toString(t.getSubjectColumn().getColumnIndex())
});
} else if(listColumnIds) {
// list the columns in the table
for(TableColumn c : t.getColumns()) {
if(!showHeader) {
System.out.println(c.getIdentifier());
} else {
System.out.println(c.toString());
}
}
} else {
// print the table meta data in human readable format
TableContext ctx = t.getContext();
System.out.println(String.format("*** Table %s ***", s));
if(ctx!=null) {
System.out.println(String.format("* URL: %s", ctx.getUrl()));
System.out.println(String.format("* Title: %s", ctx.getPageTitle()));
System.out.println(String.format("* Heading: %s", ctx.getTableTitle()));
}
System.out.println(String.format("* # Columns: %d", t.getColumns().size()));
System.out.println(String.format("* # Rows: %d", t.getRows().size()));
System.out.println(String.format("* Created from %d original tables", getOriginalTables(t).size()));
System.out.println(String.format("* Entity-Label Column: %s", t.getSubjectColumn()==null ? "?" : t.getSubjectColumn().getHeader()));
if(showProvenanceInfo) {
// collect all provenance data
Set<String> provenance = getOriginalTables(t);
if(provenance.size()>0) {
System.out.println("Provenance:");
System.out.println(String.format("\t%s",
StringUtils.join(Q.sort(provenance), ",")
));
} else {
System.out.println("Table has no provenance data attached.");
}
}
if(showDependencyInfo) {
if(t.getSchema().getFunctionalDependencies()!=null && t.getSchema().getFunctionalDependencies().size()>0) {
System.out.println("*** Functional Dependencies ***");
for(Collection<TableColumn> det : t.getSchema().getFunctionalDependencies().keySet()) {
Collection<TableColumn> dep = t.getSchema().getFunctionalDependencies().get(det);
System.out.println(
String.format(
"{%s}->{%s}",
StringUtils.join(Q.project(det, new TableColumn.ColumnHeaderProjection()), ","),
StringUtils.join(Q.project(dep, new TableColumn.ColumnHeaderProjection()), ",")));
}
}
if(t.getSchema().getCandidateKeys()!=null && t.getSchema().getCandidateKeys().size()>0) {
System.out.println("*** Candidate Keys ***");
for(Collection<TableColumn> candidateKey : t.getSchema().getCandidateKeys()) {
System.out.println(
String.format("{%s}", StringUtils.join(Q.project(candidateKey, new TableColumn.ColumnHeaderProjection()), ",")));
}
}
}
if(showData) {
System.out.println(t.getSchema().format(columnWidth));
System.out.println(t.getSchema().formatDataTypes(columnWidth));
int maxRows = Math.min(numRows, t.getRows().size());
if(maxRows==0) {
maxRows = t.getRows().size();
}
for(int i = 0; i < maxRows; i++) {
TableRow r = t.getRows().get(i);
if(showProvenanceInfo) {
System.out.println(StringUtils.join(r.getProvenance(), " / "));
}
System.out.println(r.format(columnWidth));
}
} else {
System.out.println(StringUtils.join(Q.project(t.getColumns(),
new Func<String, TableColumn>() {
@Override
public String invoke(TableColumn in) {
return String.format("%s (%s)", in.getHeader(), in.getDataType());
}}
), ", "));
}
prg.incrementProgress();
prg.report();
}
} catch(Exception e) {
System.err.println(String.format("Cannot process table '%s'!",f));
e.printStackTrace();
}
}
if(createCSV) {
csvW.close();
}
}
/**
 * Collects the distinct names of the original tables this table was built from.
 * Each provenance entry of a column is split at '~' and only the part before
 * the first '~' is kept.
 */
private Set<String> getOriginalTables(Table t) {
    Set<String> originalTableNames = new HashSet<>();
    for (TableColumn column : t.getColumns()) {
        for (String provenanceEntry : column.getProvenance()) {
            String tableName = provenanceEntry.split("~")[0];
            originalTableNames.add(tableName);
        }
    }
    return originalTableNames;
}
}
|
winter-framework/src/main/java/de/uni_mannheim/informatik/dws/winter/webtables/app/ShowTableData.java
|
/*
* Copyright (c) 2017 Data and Web Science Group, University of Mannheim, Germany (http://dws.informatik.uni-mannheim.de/)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package de.uni_mannheim.informatik.dws.winter.webtables.app;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.logging.log4j.Logger;
import com.beust.jcommander.Parameter;
import au.com.bytecode.opencsv.CSVWriter;
import de.uni_mannheim.informatik.dws.winter.utils.Executable;
import de.uni_mannheim.informatik.dws.winter.utils.ProgressReporter;
import de.uni_mannheim.informatik.dws.winter.utils.WinterLogManager;
import de.uni_mannheim.informatik.dws.winter.utils.query.Func;
import de.uni_mannheim.informatik.dws.winter.utils.query.Q;
import de.uni_mannheim.informatik.dws.winter.webtables.Table;
import de.uni_mannheim.informatik.dws.winter.webtables.TableColumn;
import de.uni_mannheim.informatik.dws.winter.webtables.TableContext;
import de.uni_mannheim.informatik.dws.winter.webtables.TableRow;
import de.uni_mannheim.informatik.dws.winter.webtables.parsers.CsvTableParser;
import de.uni_mannheim.informatik.dws.winter.webtables.parsers.JsonTableParser;
import de.uni_mannheim.informatik.dws.winter.webtables.preprocessing.TableDisambiguationExtractor;
import de.uni_mannheim.informatik.dws.winter.webtables.preprocessing.TableNumberingExtractor;
import de.uni_mannheim.informatik.dws.winter.webtables.writers.JsonTableWriter;
/**
* @author Oliver Lehmberg (oli@dwslab.de)
*
*/
public class ShowTableData extends Executable {

    // print the table content (schema, data types and rows), not only meta data
    @Parameter(names = "-d")
    private boolean showData = false;

    // column width used when formatting table content with -d
    @Parameter(names = "-w")
    private int columnWidth = 20;

    // if set, use this column index as the entity-label (key) column
    @Parameter(names = "-keyColumnIndex")
    private Integer keyColumnIndex = null;

    // run Table.convertValues() on each table before processing
    @Parameter(names = "-convertValues")
    private boolean convertValues = false;

    // write the (possibly modified) table back to its file
    @Parameter(names = "-update")
    private boolean update = false;

    // run entity-label column detection (Table.identifySubjectColumn)
    @Parameter(names = "-detectKey")
    private boolean detectKey = false;

    // only list the columns of each table instead of the meta data
    @Parameter(names = "-listColumnIds")
    private boolean listColumnIds;

    // with -listColumnIds: print full column descriptions instead of identifiers
    @Parameter(names = "-header")
    private boolean showHeader = false;

    // maximum number of rows to print with -d (0 = all rows)
    @Parameter(names = "-rows")
    private int numRows = 0;

    // emit one CSV line of meta data per table to stdout instead of the report
    @Parameter(names = "-csv")
    private boolean createCSV = false;

    // print functional dependencies and candidate keys
    @Parameter(names = "-dep")
    private boolean showDependencyInfo = false;

    // print provenance information
    @Parameter(names = "-prov")
    private boolean showProvenanceInfo = false;

    // extract disambiguations and numberings before processing
    @Parameter(names = "-pre")
    private boolean applyPreprocessing = false;

    private static final Logger logger = WinterLogManager.getLogger();

    public static void main(String[] args) throws IOException {
        ShowTableData s = new ShowTableData();

        if(s.parseCommandLine(ShowTableData.class, args) && s.getParams()!=null) {
            s.run();
        }
    }

    /**
     * Processes all files passed on the command line (or, if a single directory
     * is given, all files in it). A failure while processing one table is
     * reported and does not abort the processing of the remaining tables.
     */
    public void run() throws IOException {
        JsonTableParser p = new JsonTableParser();
        JsonTableWriter w = new JsonTableWriter();
        // p.setConvertValues(convertValues | detectKey);
        CsvTableParser csvP = new CsvTableParser();
        // csvP.setConvertValues(convertValues | detectKey);

        String[] files = getParams().toArray(new String[getParams().size()]);

        // a single argument may be a directory: then process all files inside it
        File dir = null;
        if(files.length==1) {
            dir = new File(files[0]);
            if(dir.isDirectory()) {
                files = dir.list();
            } else {
                dir = null;
            }
        }

        ProgressReporter prg = new ProgressReporter(files.length, "Processing Tables");

        CSVWriter csvW = null;
        if(createCSV) {
            csvW = new CSVWriter(new OutputStreamWriter(System.out));
        }

        for(String s : files) {
            Table t = null;
            File f = new File(s);
            if(dir!=null) {
                f = new File(dir,s);
            }

            // guard each table individually: an error in one file must not
            // abort the whole run
            try {
                if(s.endsWith("json")) {
                    t = p.parseTable(f);
                } else if(s.endsWith("csv")) {
                    t = csvP.parseTable(f);
                } else {
                    logger.error(String.format("Unknown table format '%s' (must be .json or .csv)", f.getName()));
                    continue;
                }

                if(applyPreprocessing) {
                    new TableDisambiguationExtractor().extractDisambiguations(Q.toList(t));
                    new TableNumberingExtractor().extractNumbering(Q.toList(t));
                }

                if(convertValues) {
                    t.convertValues();
                }

                // update the table if requested
                if(detectKey) {
                    t.identifySubjectColumn(0.3,true);
                    logger.error(String.format("* Detected Entity-Label Column: %s", t.getSubjectColumn()==null ? "?" : t.getSubjectColumn().getHeader()));
                }
                if(keyColumnIndex!=null) {
                    logger.error(String.format("* Setting Entity-Label Column: %s", t.getSchema().get(keyColumnIndex)));
                    t.setSubjectColumnIndex(keyColumnIndex);
                }
                if(update) {
                    w.write(t, f);
                }

                if(createCSV) {
                    // create a csv file with the table meta data
                    csvW.writeNext(new String[] {
                            s,
                            Integer.toString(t.getRows().size()),
                            Integer.toString(t.getColumns().size()),
                            t.getContext()==null ? "" : t.getContext().getUrl(),
                            t.getContext()==null ? "" : t.getContext().getPageTitle(),
                            t.getContext()==null ? "" : t.getContext().getTableTitle(),
                            Integer.toString(getOriginalTables(t).size()),
                            t.getSubjectColumn()==null ? "" : Integer.toString(t.getSubjectColumn().getColumnIndex())
                    });
                } else if(listColumnIds) {
                    // list the columns in the table
                    for(TableColumn c : t.getColumns()) {
                        if(!showHeader) {
                            System.out.println(c.getIdentifier());
                        } else {
                            System.out.println(c.toString());
                        }
                    }
                } else {
                    // print the table meta data in human readable format
                    TableContext ctx = t.getContext();
                    System.out.println(String.format("*** Table %s ***", s));
                    if(ctx!=null) {
                        System.out.println(String.format("* URL: %s", ctx.getUrl()));
                        System.out.println(String.format("* Title: %s", ctx.getPageTitle()));
                        System.out.println(String.format("* Heading: %s", ctx.getTableTitle()));
                    }
                    System.out.println(String.format("* # Columns: %d", t.getColumns().size()));
                    System.out.println(String.format("* # Rows: %d", t.getRows().size()));
                    System.out.println(String.format("* Created from %d original tables", getOriginalTables(t).size()));
                    System.out.println(String.format("* Entity-Label Column: %s", t.getSubjectColumn()==null ? "?" : t.getSubjectColumn().getHeader()));

                    if(showProvenanceInfo) {
                        // collect all provenance data
                        Set<String> provenance = getOriginalTables(t);
                        if(provenance.size()>0) {
                            System.out.println("Provenance:");
                            System.out.println(String.format("\t%s",
                                    StringUtils.join(Q.sort(provenance), ",")
                            ));
                        } else {
                            System.out.println("Table has no provenance data attached.");
                        }
                    }

                    if(showDependencyInfo) {
                        if(t.getSchema().getFunctionalDependencies()!=null && t.getSchema().getFunctionalDependencies().size()>0) {
                            System.out.println("*** Functional Dependencies ***");
                            for(Collection<TableColumn> det : t.getSchema().getFunctionalDependencies().keySet()) {
                                Collection<TableColumn> dep = t.getSchema().getFunctionalDependencies().get(det);
                                System.out.println(
                                        String.format(
                                                "{%s}->{%s}",
                                                StringUtils.join(Q.project(det, new TableColumn.ColumnHeaderProjection()), ","),
                                                StringUtils.join(Q.project(dep, new TableColumn.ColumnHeaderProjection()), ",")));
                            }
                        }
                        if(t.getSchema().getCandidateKeys()!=null && t.getSchema().getCandidateKeys().size()>0) {
                            System.out.println("*** Candidate Keys ***");
                            for(Collection<TableColumn> candidateKey : t.getSchema().getCandidateKeys()) {
                                System.out.println(
                                        String.format("{%s}", StringUtils.join(Q.project(candidateKey, new TableColumn.ColumnHeaderProjection()), ",")));
                            }
                        }
                    }

                    if(showData) {
                        System.out.println(t.getSchema().format(columnWidth));
                        System.out.println(t.getSchema().formatDataTypes(columnWidth));

                        int maxRows = Math.min(numRows, t.getRows().size());
                        if(maxRows==0) {
                            maxRows = t.getRows().size();
                        }

                        for(int i = 0; i < maxRows; i++) {
                            TableRow r = t.getRows().get(i);
                            if(showProvenanceInfo) {
                                System.out.println(StringUtils.join(r.getProvenance(), " / "));
                            }
                            System.out.println(r.format(columnWidth));
                        }
                    } else {
                        System.out.println(StringUtils.join(Q.project(t.getColumns(),
                                new Func<String, TableColumn>() {

                                    @Override
                                    public String invoke(TableColumn in) {
                                        return String.format("%s (%s)", in.getHeader(), in.getDataType());
                                    }}
                        ), ", "));
                    }
                }

                prg.incrementProgress();
                prg.report();
            } catch(Exception e) {
                // report the broken table and continue with the next one
                System.err.println(String.format("Cannot process table '%s'!",f));
                e.printStackTrace();
            }
        }

        if(createCSV) {
            csvW.close();
        }
    }

    /**
     * Collects the distinct names of the original tables this table was built
     * from: the part before the first '~' of every column provenance entry.
     */
    private Set<String> getOriginalTables(Table t) {
        Set<String> tbls = new HashSet<>();

        for(TableColumn c : t.getColumns()) {
            for(String prov : c.getProvenance()) {
                tbls.add(prov.split("~")[0]);
            }
        }

        return tbls;
    }
}
|
added exception handling
|
winter-framework/src/main/java/de/uni_mannheim/informatik/dws/winter/webtables/app/ShowTableData.java
|
added exception handling
|
|
Java
|
apache-2.0
|
de2f692602bd258ee2ce08afd22e88b258aa26ea
| 0
|
AlanJager/zstack,zstackorg/zstack,zstackio/zstack,AlanJager/zstack,zstackio/zstack,AlanJager/zstack,MatheMatrix/zstack,zstackio/zstack,MatheMatrix/zstack,zstackorg/zstack,MatheMatrix/zstack
|
package org.zstack.network.service.virtualrouter;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.appliancevm.*;
import org.zstack.appliancevm.ApplianceVmConstant.Params;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.db.Q;
import org.zstack.core.db.SQL;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.ErrorCodeList;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l2.L2NetworkGetVniExtensionPoint;
import org.zstack.header.network.l2.L2NetworkVO;
import org.zstack.header.network.l2.L2NetworkVO_;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.*;
import org.zstack.header.rest.JsonAsyncRESTCallback;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.vm.*;
import org.zstack.network.service.MtuGetter;
import org.zstack.network.service.vip.*;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingCmd;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingRsp;
import org.zstack.network.service.virtualrouter.VirtualRouterConstant.Param;
import org.zstack.network.service.virtualrouter.ha.VirtualRouterHaBackend;
import org.zstack.network.service.virtualrouter.vip.VirtualRouterCreatePublicVipFlow;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.network.IPv6Constants;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.zstack.core.Platform.*;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.ADDITIONAL_PUBLIC_NIC_MASK;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.GUEST_NIC_MASK;
/**
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class VirtualRouter extends ApplianceVmBase {
private static final CLogger logger = Utils.getLogger(VirtualRouter.class);

static {
    // In addition to what the appliance-vm base class allows, a running
    // virtual router may also be reconnected and updated.
    allowedOperations.addState(VmInstanceState.Running, APIReconnectVirtualRouterMsg.class.getName());
    allowedOperations.addState(VmInstanceState.Running, APIUpdateVirtualRouterMsg.class.getName());
    allowedOperations.addState(VmInstanceState.Running, ReconnectVirtualRouterVmMsg.class.getName());
}

@Autowired
protected VirtualRouterManager vrMgr;
@Autowired
protected RESTFacade restf;
@Autowired
protected VirtualRouterHaBackend haBackend;
@Autowired
protected VirutalRouterDefaultL3ConfigProxy defaultL3ConfigProxy;

// Inventory view of the underlying VirtualRouterVmVO; only set by the
// VirtualRouterVmVO constructor (stays null when constructed from a plain
// ApplianceVmVO).
protected VirtualRouterVmInventory vr;
/**
 * Creates a virtual router wrapper from a generic appliance-vm record.
 * Note: {@link #vr} is not initialized in this case.
 */
public VirtualRouter(ApplianceVmVO vo) {
    super(vo);
}
/**
 * Creates a virtual router wrapper from a virtual-router record and caches
 * its inventory view in {@link #vr}.
 */
public VirtualRouter(VirtualRouterVmVO vo) {
    super(vo);
    vr = new VirtualRouterVmInventory(vo);
}
/** Returns the inventory of this vm as a virtual-router inventory. */
@Override
protected VmInstanceInventory getSelfInventory() {
    return VirtualRouterVmInventory.valueOf(getSelf());
}
/** Post-create flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostCreateFlows() {
    return vrMgr.getPostCreateFlows();
}
/** Post-start flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostStartFlows() {
    return vrMgr.getPostStartFlows();
}
/** Post-stop flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostStopFlows() {
    return vrMgr.getPostStopFlows();
}
/** Post-reboot flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostRebootFlows() {
    return vrMgr.getPostRebootFlows();
}
/** Post-destroy flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostDestroyFlows() {
    return vrMgr.getPostDestroyFlows();
}
/** Post-migrate flows; delegated to the virtual router manager. */
@Override
protected List<Flow> getPostMigrateFlows() {
    return vrMgr.getPostMigrateFlows();
}
/** Flow chain used by {@link #reconnect}; built by the virtual router manager. */
protected FlowChain getReconnectChain() {
    return vrMgr.getReconnectFlowChain();
}
/**
 * Dispatches the virtual-router specific API messages; everything else is
 * forwarded to the appliance-vm base class.
 */
@Override
protected void handleApiMessage(APIMessage msg) {
    if (msg instanceof APIReconnectVirtualRouterMsg) {
        handle((APIReconnectVirtualRouterMsg) msg);
        return;
    }
    if (msg instanceof APIUpdateVirtualRouterMsg) {
        handle((APIUpdateVirtualRouterMsg) msg);
        return;
    }
    super.handleApiMessage(msg);
}
/**
 * Dispatches the virtual-router specific local messages; everything else is
 * forwarded to the appliance-vm base class.
 */
@Override
protected void handleLocalMessage(Message msg) {
    if (msg instanceof VirtualRouterAsyncHttpCallMsg) {
        handle((VirtualRouterAsyncHttpCallMsg) msg);
        return;
    }
    if (msg instanceof ReconnectVirtualRouterVmMsg) {
        handle((ReconnectVirtualRouterVmMsg) msg);
        return;
    }
    if (msg instanceof PingVirtualRouterVmMsg) {
        handle((PingVirtualRouterVmMsg) msg);
        return;
    }
    super.handleLocalMessage(msg);
}
/**
 * Pings the virtual router agent over HTTP and replies with whether the
 * router is still connected and whether a reconnect should be issued.
 * The task is serialized per router via the "ping-virtualrouter-&lt;uuid&gt;"
 * sync signature. No ping is sent while the vm is neither Running nor
 * Unknown, or while the appliance is still Connecting.
 */
private void handle(final PingVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return String.format("ping-virtualrouter-%s", self.getUuid());
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final PingVirtualRouterVmReply reply = new PingVirtualRouterVmReply();
            // skip the ping when the vm cannot be reached or is mid-connect
            if ((VmInstanceState.Running != self.getState() && VmInstanceState.Unknown != self.getState())
                    || ApplianceVmStatus.Connecting == getSelf().getStatus()) {
                reply.setDoReconnect(false);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            PingCmd cmd = new PingCmd();
            cmd.setUuid(self.getUuid());
            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), VirtualRouterConstant.VR_PING), cmd, new JsonAsyncRESTCallback<PingRsp>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setDoReconnect(true);
                    reply.setConnected(false);
                    // bugfix: log the actual error; reply.getError() is never
                    // set on this path and was always null
                    logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), err));
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(PingRsp ret) {
                    reply.setDoReconnect(true);
                    if (!ret.isSuccess()) {
                        logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), ret.getError()));
                        reply.setConnected(false);
                    } else {
                        // the agent must echo our uuid; a different uuid
                        // indicates the agent was restarted
                        boolean connected = self.getUuid().equals(ret.getUuid());
                        if (!connected) {
                            logger.warn(String.format("a signature lost on the virtual router vm[uuid:%s] changed, it's probably caused by the agent restart. We will issue a reconnect soon", self.getUuid()));
                        } else {
                            connected = ApplianceVmStatus.Connected == getSelf().getStatus();
                        }

                        reply.setConnected(connected);
                        reply.setHaStatus(ret.getHaStatus());
                        // fire health canonical events based on the agent's report
                        if ((ret.getHealthy() != null) && (!ret.getHealthy()) && (ret.getHealthDetail() != null)) {
                            fireServiceUnhealthyCanonicalEvent(inerr("virtual router %s unhealthy, detail %s", getSelf().getUuid(), ret.getHealthDetail()));
                        } else {
                            fireServicehealthyCanonicalEvent();
                        }
                    }

                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<PingRsp> getReturnClass() {
                    return PingRsp.class;
                }
            }, TimeUnit.SECONDS, (long)ApplianceVmGlobalConfig.CONNECT_TIMEOUT.value(Integer.class));
        }

        @Override
        public String getName() {
            return "ping-virtual-router";
        }
    });
}
/**
 * Handles the internal reconnect request. Serialized on the router's sync
 * thread; validates the vm state, then runs {@link #reconnect} and replies
 * with the result.
 */
private void handle(final ReconnectVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final ReconnectVirtualRouterVmReply reply = new ReconnectVirtualRouterVmReply();
            // reload the VO so the state check uses fresh data
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                // optionally mark the appliance disconnected when the
                // reconnect is rejected by the state check
                if (msg.isStatusChange()) {
                    changeApplianceVmStatus(ApplianceVmStatus.Disconnected);
                }
                reply.setError(allowed);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}
/** Builds the agent URL for the given management ip and path; delegated to the manager. */
protected String buildUrl(String mgmtIp, String path) {
    return vrMgr.buildUrl(mgmtIp, path);
}
/**
 * Forwards an arbitrary command to the virtual router agent via an async
 * HTTP POST and replies with the raw response map. When the message asks for
 * a status check, the call is rejected (OperationFailureException) unless
 * the vm is Running and the appliance is Connected; a missing management nic
 * is rejected as well. The sync level is taken from the manager's
 * parallelism degree for this router.
 */
private void handle(final VirtualRouterAsyncHttpCallMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return String.format("%s-commands", syncThreadName);
        }

        @Override
        public void run(final SyncTaskChain chain) {
            refreshVO();
            final VirtualRouterAsyncHttpCallReply reply = new VirtualRouterAsyncHttpCallReply();
            if (msg.isCheckStatus() && getSelf().getState() != VmInstanceState.Running) {
                throw new OperationFailureException(operr("the virtual router[name:%s, uuid:%s, current state:%s] is not running," +
                        "and cannot perform required operation. Please retry your operation later once it is running", self.getName(), self.getUuid(), self.getState()));
            }

            if (msg.isCheckStatus() && getSelf().getStatus() != ApplianceVmStatus.Connected) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] is in status of %s that cannot make http call to %s",
                        self.getUuid(), getSelf().getStatus(), msg.getPath()));
            }

            if (vr.getManagementNic() == null) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] has no management nic that cannot make http call to %s",
                        self.getUuid(), msg.getPath()));
            }

            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), msg.getPath()), msg.getCommand(), new JsonAsyncRESTCallback<LinkedHashMap>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setError(err);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(LinkedHashMap ret) {
                    reply.setResponse(ret);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<LinkedHashMap> getReturnClass() {
                    return LinkedHashMap.class;
                }
            });
        }

        @Override
        protected int getSyncLevel() {
            return vrMgr.getParallelismDegree(self.getUuid());
        }

        @Override
        public String getName() {
            return getSyncSignature();
        }
    });
}
/**
 * Handles the API request to update a virtual router. Serialized on the
 * router's sync thread; delegates to {@link #updateVirutalRouter} and
 * publishes the API event with either the refreshed inventory or the error.
 */
private void handle(final APIUpdateVirtualRouterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final APIUpdateVirtualRouterEvent evt = new APIUpdateVirtualRouterEvent(msg.getId());
            // reload the VO so the state check uses fresh data
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                evt.setError(allowed);
                bus.publish(evt);
                chain.next();
                return;
            }

            updateVirutalRouter(msg, new Completion(msg, chain) {
                @Override
                public void success() {
                    // re-read the VO so the event carries the updated inventory
                    VirtualRouterVmVO vrVO = dbf.findByUuid(msg.getVmInstanceUuid(), VirtualRouterVmVO.class);
                    evt.setInventory((VirtualRouterVmInventory.valueOf(vrVO)));
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("update-virtual-router-%s", self.getUuid());
        }
    });
}
/**
 * Swaps the default-route network service reference of the router: detaches
 * the VR_DEFAULT_ROUTE_NETWORK service from the old L3 network and attaches
 * it to the new one, inside a single transaction.
 */
@Transactional
protected void replaceVirtualRouterDefaultNetwork(String vrUuid, String oldL3Uuid, String newL3Uuud) {
    List<String> oldNetworks = Collections.singletonList(oldL3Uuid);
    List<String> newNetworks = Collections.singletonList(newL3Uuud);
    defaultL3ConfigProxy.detachNetworkService(vrUuid, VirtualRouterConstant.VR_DEFAULT_ROUTE_NETWORK, oldNetworks);
    defaultL3ConfigProxy.attachNetworkService(vrUuid, VirtualRouterConstant.VR_DEFAULT_ROUTE_NETWORK, newNetworks);
}
/**
 * Changes the default-route L3 network of the virtual router.
 * The flow chain 1) swaps the default-route network reference in the DB,
 * 2) releases the SNAT service from the vip of the old default network,
 * 3) acquires the SNAT service on the vip of the new default network, and
 * 4) reconfigures the router backend. On success an HA task
 * (VR_CHANGE_DEFAULT_ROUTE_JOB) is submitted — presumably so an HA peer
 * applies the same change. Each flow rolls back its own step on failure.
 */
private void updateVirutalRouter(APIUpdateVirtualRouterMsg msg, final Completion completion) {
    VirtualRouterVmVO vrVO = dbf.findByUuid(msg.getVmInstanceUuid(), VirtualRouterVmVO.class);

    FlowChain fchain = FlowChainBuilder.newSimpleFlowChain();
    fchain.setName(String.format("update-virtual-router-%s", msg.getVmInstanceUuid()));
    fchain.then(new Flow() {
        String __name__ = "update-virtual-router-db";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            replaceVirtualRouterDefaultNetwork(msg.getVmInstanceUuid(), vrVO.getDefaultRouteL3NetworkUuid(),
                    msg.getDefaultRouteL3NetworkUuid());
            trigger.next();
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            // restore the previous default-route network
            replaceVirtualRouterDefaultNetwork(msg.getVmInstanceUuid(), msg.getDefaultRouteL3NetworkUuid(),
                    vrVO.getDefaultRouteL3NetworkUuid());
            trigger.rollback();
        }
    }).then(new Flow() {
        String __name__ = "release-old-snat-of-vip";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            // find the router nic on the old default-route network
            VmNicVO oldNic = null;
            for (VmNicVO nic: vrVO.getVmNics()) {
                if (nic.getL3NetworkUuid().equals(vrVO.getDefaultRouteL3NetworkUuid())) {
                    oldNic = nic;
                    break;
                }
            }
            if (oldNic == null) {
                trigger.next();
                return;
            }

            String vipIp = oldNic.getIp();
            // when the default route is on the management network, the snat
            // vip lives on the public nic instead
            if (vrVO.getDefaultRouteL3NetworkUuid().equals(vrVO.getManagementNetworkUuid())) {
                VmNicInventory publicNic = vrMgr.getSnatPubicInventory(VirtualRouterVmInventory.valueOf(vrVO));
                vipIp = publicNic.getIp();
            }

            VipVO vipVO = Q.New(VipVO.class).eq(VipVO_.ip, vipIp)
                    .eq(VipVO_.l3NetworkUuid, oldNic.getL3NetworkUuid()).find();
            if (vipVO == null) {
                trigger.next();
                return;
            }
            // remember the old vip so the rollback can re-acquire it
            data.put("oldVip", vipVO);

            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.release(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            VipVO vipVO = (VipVO) data.get("oldVip");
            if (vipVO == null) {
                trigger.rollback();
                return;
            }

            // best effort: the rollback continues even if re-acquiring fails
            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.acquire(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.rollback();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.rollback();
                }
            });
        }
    }).then(new Flow() {
        String __name__ = "apply-new-snat-of-vip";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            // find the router nic on the new default-route network
            VmNicVO newNic = null;
            for (VmNicVO nic: vrVO.getVmNics()) {
                if (nic.getL3NetworkUuid().equals(msg.getDefaultRouteL3NetworkUuid())) {
                    newNic = nic;
                    break;
                }
            }
            if (newNic == null) {
                // bugfix: the second format specifier was written '[uuid:s]'
                // (missing '%'), so the network uuid never appeared in the error
                trigger.fail(argerr("virtual router [uuid:%s] does not has nic in l3 network [uuid:%s]", vrVO.getUuid(),
                        msg.getDefaultRouteL3NetworkUuid()));
                return;
            }

            String vipIp = newNic.getIp();
            if (msg.getDefaultRouteL3NetworkUuid().equals(vrVO.getManagementNetworkUuid())) {
                VirtualRouterVmInventory vrInv = VirtualRouterVmInventory.valueOf(vrVO);
                vrInv.setDefaultRouteL3NetworkUuid(msg.getDefaultRouteL3NetworkUuid());
                VmNicInventory publicNic = vrMgr.getSnatPubicInventory(vrInv);
                vipIp = publicNic.getIp();
            }

            VipVO vipVO = Q.New(VipVO.class).eq(VipVO_.ip, vipIp)
                    .eq(VipVO_.l3NetworkUuid, newNic.getL3NetworkUuid()).find();
            if (vipVO == null) {
                trigger.fail(argerr("there is no vip [ip:%s] in l3 network [uuid:%s]", vipIp,
                        msg.getDefaultRouteL3NetworkUuid()));
                return;
            }
            // remember the new vip so the rollback can release it again
            data.put("newVip", vipVO);

            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.acquire(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            VipVO vipVO = (VipVO) data.get("newVip");
            if (vipVO == null) {
                trigger.rollback();
                return;
            }

            // best effort: the rollback continues even if the release fails
            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.release(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.rollback();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.rollback();
                }
            });
        }
    }).then(new NoRollbackFlow() {
        String __name__ = "update-virtual-router-backend";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            vrMgr.changeVirutalRouterDefaultL3Network(msg.getVmInstanceUuid(), msg.getDefaultRouteL3NetworkUuid(), vrVO.getDefaultRouteL3NetworkUuid(), new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }
    }).done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // submit the HA task; the completion is passed through to it
            Map<String, Object> haData = new HashMap<>();
            haData.put(VirtualRouterHaCallbackInterface.Params.TaskName.toString(), VirtualRouterConstant.VR_CHANGE_DEFAULT_ROUTE_JOB);
            haData.put(VirtualRouterHaCallbackInterface.Params.OriginRouterUuid.toString(), msg.getVmInstanceUuid());
            haData.put(VirtualRouterHaCallbackInterface.Params.Struct.toString(), msg.getDefaultRouteL3NetworkUuid());
            haData.put(VirtualRouterHaCallbackInterface.Params.Struct1.toString(), vrVO.getDefaultRouteL3NetworkUuid());
            haBackend.submitVirutalRouterHaTask(haData, completion);
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Handles the API request to reconnect the virtual router. Serialized on the
 * router's sync thread; delegates to {@link #reconnect} and publishes the
 * API event with either the refreshed inventory or the error.
 */
private void handle(final APIReconnectVirtualRouterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final APIReconnectVirtualRouterEvent evt = new APIReconnectVirtualRouterEvent(msg.getId());
            // reload the VO so the state check uses fresh data
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                evt.setError(allowed);
                bus.publish(evt);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    evt.setInventory((ApplianceVmInventory) getSelfInventory());
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}
/**
 * Runs the reconnect flow chain against this virtual router. The appliance
 * status is moved to Connecting before the manager's reconnect flows run and
 * to Connected once they succeed; on failure the inserted flow's rollback
 * sets it back to Disconnected and fires a disconnected canonical event. An
 * additional disconnected event is fired in the error handler if the router
 * was Connected before the attempt.
 */
private void reconnect(final Completion completion) {
    ApplianceVmStatus oldStatus = getSelf().getStatus();
    FlowChain chain = getReconnectChain();
    chain.setName(String.format("reconnect-virtual-router-%s", self.getUuid()));
    // context consumed by the flows of the reconnect chain
    chain.getData().put(VirtualRouterConstant.Param.VR.toString(), vr);
    chain.getData().put(Param.IS_RECONNECT.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.isReconnect.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.managementNicIp.toString(), vr.getManagementNic().getIp());
    chain.getData().put(Params.applianceVmUuid.toString(), self.getUuid());

    // pass this appliance vm's firewall rules to the chain
    SimpleQuery<ApplianceVmFirewallRuleVO> q = dbf.createQuery(ApplianceVmFirewallRuleVO.class);
    q.add(ApplianceVmFirewallRuleVO_.applianceVmUuid, Op.EQ, getSelf().getUuid());
    List<ApplianceVmFirewallRuleVO> vos = q.list();
    List<ApplianceVmFirewallRuleInventory> rules = ApplianceVmFirewallRuleInventory.valueOf(vos);
    chain.getData().put(ApplianceVmConstant.Params.applianceVmFirewallRules.toString(), rules);

    chain.insert(new Flow() {
        String __name__ = "change-appliancevm-status-to-connecting";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connecting);
            trigger.next();
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Disconnected);
            fireDisconnectedCanonicalEvent(operr("appliance vm %s reconnect failed",
                    getSelf().getUuid()));
            trigger.rollback();
        }
    }).then(new NoRollbackFlow() {
        String __name__ = "change-appliancevm-status-to-connected";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connected);
            trigger.next();
        }
    }).done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // refresh the cached VO after the chain changed its status
            self = dbf.reload(self);
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            // only fire the canonical event for a connected->disconnected transition
            if (oldStatus == ApplianceVmStatus.Connected) {
                fireDisconnectedCanonicalEvent(errCode);
            }
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Flow that pushes the configuration of a newly attached nic down to the
 * virtual router agent via VR_CONFIGURE_NIC_PATH. Expects
 * APPLY_TO_VIRTUALROUTER and VR_NIC in the flow data; does nothing when
 * APPLY_TO_VIRTUALROUTER is false.
 * NOTE(review): the class name violates Java naming conventions
 * (should be UpperCamelCase), but renaming would break external references.
 */
public class virtualRouterAfterAttachNicFlow extends NoRollbackFlow {
    @Override
    public void run(FlowTrigger trigger, Map data) {
        boolean applyToVirtualRouter = (boolean)data.get(Param.APPLY_TO_VIRTUALROUTER.toString());
        if (!applyToVirtualRouter) {
            trigger.next();
            return;
        }

        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC.toString());
        L3NetworkVO l3NetworkVO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, nicInventory.getL3NetworkUuid()).find();

        // build the nic description that is sent to the agent
        VirtualRouterCommands.ConfigureNicCmd cmd = new VirtualRouterCommands.ConfigureNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setDefaultRoute(false);
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        // fill ip/gateway per ip version; IPv6 additionally carries the
        // prefix length and address mode of its ip range
        for (UsedIpInventory ip : nicInventory.getUsedIps()) {
            if (ip.getIpVersion() == IPv6Constants.IPv4) {
                info.setIp(ip.getIp());
                info.setGateway(ip.getGateway());
                info.setNetmask(ip.getNetmask());
            } else {
                info.setIp6(ip.getIp());
                info.setGateway6(ip.getGateway());
                NormalIpRangeVO ipr = Q.New(NormalIpRangeVO.class).eq(NormalIpRangeVO_.uuid, ip.getIpRangeUuid()).find();
                info.setPrefixLength(ipr.getPrefixLen());
                info.setAddressMode(ipr.getAddressMode());
            }
        }
        L2NetworkVO l2NetworkVO = Q.New(L2NetworkVO.class).eq(L2NetworkVO_.uuid, l3NetworkVO.getL2NetworkUuid()).find();
        info.setCategory(l3NetworkVO.getCategory().toString());
        info.setL2type(l2NetworkVO.getType());
        info.setPhysicalInterface(l2NetworkVO.getPhysicalInterface());
        // ask the extension matching the l2 network type for the vni, if any
        for (L2NetworkGetVniExtensionPoint ext : pluginRgty.getExtensionList(L2NetworkGetVniExtensionPoint.class)) {
            if (ext.getL2NetworkVniType().equals(l2NetworkVO.getType())) {
                info.setVni(ext.getL2NetworkVni(l2NetworkVO.getUuid(), vr.getHostUuid()));
            }
        }
        info.setMtu(new MtuGetter().getMtu(l3NetworkVO.getUuid()));
        cmd.setNics(Arrays.asList(info));

        // send the command to the router's agent through the message bus
        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_CONFIGURE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    trigger.fail(reply.getError());
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.ConfigureNicRsp rsp = re.toResponse(VirtualRouterCommands.ConfigureNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s, ip:%s]",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp()));
                    trigger.next();
                } else {
                    ErrorCode err = operr("unable to add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s ip:%s], because %s",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError());
                    trigger.fail(err);
                }
            }
        });
    }
}
private class virtualRouterApplyServicesAfterAttachNicFlow implements Flow {
    String __name__ = "virtualRouter-apply-services-afterAttachNic";

    /**
     * Walks the extension list asynchronously, invoking afterAttachNic on each
     * extension in turn. Recurses on success and aborts on the first failure.
     */
    private void applyNext(Iterator<VirtualRouterAfterAttachNicExtensionPoint> extensions, VmNicInventory nic, Completion completion) {
        if (!extensions.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint extension = extensions.next();
        logger.debug(String.format("execute afterAttachNic extension %s", extension.getClass().getSimpleName()));
        extension.afterAttachNic(nic, new Completion(completion) {
            @Override
            public void success() {
                applyNext(extensions, nic, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nic = (VmNicInventory) data.get(Param.VR_NIC.toString());
        boolean applyToVirtualRouter = (boolean) data.get(Param.APPLY_TO_VIRTUALROUTER.toString());

        // services are only applied when the nic configuration goes to the backend
        if (!applyToVirtualRouter) {
            trigger.next();
            return;
        }

        // IPv6-only nics are skipped
        if (nic.isIpv6OnlyNic()) {
            trigger.next();
            return;
        }

        applyNext(pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator(),
                nic, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }

    /**
     * Walks the extension list asynchronously, invoking each extension's rollback
     * callback in turn; rollback callbacks cannot fail.
     */
    private void rollbackNext(Iterator<VirtualRouterAfterAttachNicExtensionPoint> extensions, VmNicInventory nic, NoErrorCompletion completion) {
        if (!extensions.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint extension = extensions.next();
        extension.afterAttachNicRollback(nic, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                rollbackNext(extensions, nic, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nic = (VmNicInventory) data.get(Param.VR_NIC.toString());
        rollbackNext(pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator(),
                nic, new NoErrorCompletion() {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
@Override
protected void afterAttachNic(VmNicInventory nicInventory, Completion completion) {
    // Delegate to the three-argument variant, always applying the nic configuration
    // to the virtual router backend.
    // NOTE(review): super.afterAttachNic(...) invokes the base-class three-argument
    // implementation non-virtually, bypassing this class's own override of that
    // method — confirm this is the intended dispatch.
    super.afterAttachNic(nicInventory, true, completion);
}
/**
 * Records the newly attached nic's role in nic/ip metadata, then runs the
 * post-attach workflow (vip creation, service application, HA propagation).
 *
 * @param nicInventory   the nic that was just attached
 * @param applyToBackend whether the services should also be pushed to the
 *                       virtual router backend
 * @param completion     called when the whole workflow finishes
 */
@Override
protected void afterAttachNic(VmNicInventory nicInventory, boolean applyToBackend, Completion completion) {
    VmNicVO nicVO = Q.New(VmNicVO.class).eq(VmNicVO_.uuid, nicInventory.getUuid()).find();
    L3NetworkVO l3VO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, nicVO.getL3NetworkUuid()).find();

    if (l3VO.getCategory().equals(L3NetworkCategory.Private)) {
        // guest-facing nic: mark both the nic and its used ip
        nicVO.setMetaData(GUEST_NIC_MASK.toString());
        UsedIpVO ipVO = Q.New(UsedIpVO.class).eq(UsedIpVO_.uuid, nicInventory.getUsedIpUuid()).find();
        ipVO.setMetaData(GUEST_NIC_MASK.toString());
        dbf.updateAndRefresh(ipVO);
    } else {
        // any non-private network counts as an additional public nic
        nicVO.setMetaData(ADDITIONAL_PUBLIC_NIC_MASK.toString());
    }

    nicVO = dbf.updateAndRefresh(nicVO);
    logger.debug(String.format("updated metadata of vmnic[uuid: %s]", nicVO.getUuid()));

    VirtualRouterVmVO routerVO = dbf.findByUuid(self.getUuid(), VirtualRouterVmVO.class);

    Map<String, Object> ctx = new HashMap<String, Object>();
    ctx.put(Param.VR.toString(), VirtualRouterVmInventory.valueOf(routerVO));
    ctx.put(Param.VR_NIC.toString(), VmNicInventory.valueOf(nicVO));
    ctx.put(Param.SNAT.toString(), Boolean.FALSE);
    ctx.put(Param.APPLY_TO_VIRTUALROUTER.toString(), applyToBackend);

    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("apply-services-after-attach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
    chain.setData(ctx);
    chain.then(new virtualRouterAfterAttachNicFlow());
    chain.then(new VirtualRouterCreatePublicVipFlow());
    chain.then(new virtualRouterApplyServicesAfterAttachNicFlow());
    chain.then(haBackend.getAttachL3NetworkFlow());
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Sequentially invokes afterDetachVirtualRouterNic on every extension.
 *
 * The walk deliberately continues when an extension fails — nic detach is a
 * best-effort cleanup — but the error is now logged instead of being silently
 * dropped.
 *
 * @param exts         remaining extensions to invoke
 * @param nicInventory the nic being detached
 * @param completion   called once every extension has been visited
 */
private void virtualRouterAfterDetachNic(Iterator<VirtualRouterAfterDetachNicExtensionPoint> exts, VmNicInventory nicInventory, Completion completion) {
    if (!exts.hasNext()) {
        completion.success();
        return;
    }

    VirtualRouterAfterDetachNicExtensionPoint ext = exts.next();
    ext.afterDetachVirtualRouterNic(nicInventory, new Completion(completion) {
        @Override
        public void success() {
            virtualRouterAfterDetachNic(exts, nicInventory, completion);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            // best-effort: log the swallowed error and keep visiting the remaining extensions
            logger.warn(String.format("extension[%s] failed to handle detach of nic[uuid:%s], continue anyway: %s",
                    ext.getClass().getSimpleName(), nicInventory.getUuid(), errorCode));
            virtualRouterAfterDetachNic(exts, nicInventory, completion);
        }
    });
}
@Override
protected void afterDetachNic(VmNicInventory nicInventory, boolean isRollback, Completion completion) {
    // When the detach is itself the rollback of a failed attach there is
    // nothing left to clean up here.
    if (isRollback) {
        completion.success();
        return;
    }

    List<VirtualRouterAfterDetachNicExtensionPoint> exts = pluginRgty.getExtensionList(VirtualRouterAfterDetachNicExtensionPoint.class);
    virtualRouterAfterDetachNic(exts.iterator(), nicInventory, new Completion(completion) {
        @Override
        public void success() {
            // once every extension has run, detach the L3 network from the
            // whole virtual router HA group as well
            haBackend.detachL3NetworkFromVirtualRouterHaGroup(nicInventory.getVmInstanceUuid(),
                    nicInventory.getL3NetworkUuid(), isRollback, completion);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            completion.fail(errorCode);
        }
    });
}
/**
 * Flow that releases every network service bound to vips reachable through the
 * nic that is about to be detached.
 */
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
private static class virtualRouterReleaseVipbeforeDetachNic extends NoRollbackFlow {
    @Autowired
    private VipManager vipMgr;

    // fix: this flow previously shared the name "virtualRouter-beforeDetachNic"
    // with the virtualRouterbeforeDetachNic flow, making the two flows
    // indistinguishable in flow-chain logs
    String __name__ = "virtualRouter-release-vip-beforeDetachNic";

    /**
     * Asynchronously releases each service bound to the vip, one at a time. A
     * failing service does not stop the walk: the nic detach must proceed even
     * if some service cannot be cleanly released.
     */
    private void virtualRouterReleaseVipServices(Iterator<String> it, VipInventory vip, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        String service = it.next();
        VipReleaseExtensionPoint ext = vipMgr.getVipReleaseExtensionPoint(service);
        ext.releaseServicesOnVip(vip, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterReleaseVipServices(it, vip, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                /* even failed, continue release */
                virtualRouterReleaseVipServices(it, vip, completion);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nic = (VmNicInventory) data.get(Param.VR_NIC.toString());

        // only guest nics carry vip services; skip public/management nics
        if (!VirtualRouterNicMetaData.GUEST_NIC_MASK_STRING_LIST.contains(nic.getMetaData())) {
            trigger.next();
            return;
        }

        // dedicated-role virtual routers handle their vips elsewhere
        if (VirtualRouterSystemTags.DEDICATED_ROLE_VR.hasTag(nic.getVmInstanceUuid())) {
            trigger.next();
            return;
        }

        VirtualRouterVmInventory vr = VirtualRouterVmInventory.valueOf((VirtualRouterVmVO)
                Q.New(VirtualRouterVmVO.class).eq(VirtualRouterVmVO_.uuid, nic.getVmInstanceUuid()).find());

        /*
         * fixme: to be done, the active will not done in standby vpc router because
         * VipPeerL3NetworkRefVO record has been deleted during that of master router
         * this will result the vip delete action will not done in agent before detach nic and
         * not delete some configure such as vip QoS, ifbx.
         */
        List<VipVO> vips = SQL.New("select distinct vip from VipVO vip, VipPeerL3NetworkRefVO ref " +
                "where ref.vipUuid = vip.uuid and ref.l3NetworkUuid in (:routerNetworks) " +
                "and vip.l3NetworkUuid = :l3Uuid")
                .param("l3Uuid", nic.getL3NetworkUuid())
                .param("routerNetworks", vr.getAllL3Networks())
                .list();
        if (vips.isEmpty()) {
            trigger.next();
            return;
        }

        ErrorCodeList errList = new ErrorCodeList();
        new While<>(vips).all((vip, completion) -> {
            Set<String> services = vip.getServicesTypes();
            if (services == null || services.isEmpty()) {
                completion.done();
                return;
            }

            virtualRouterReleaseVipServices(services.iterator(), VipInventory.valueOf(vip), new Completion(completion) {
                @Override
                public void success() {
                    completion.done();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    errList.getCauses().add(errorCode);
                    completion.done();
                }
            });
        }).run(new NoErrorCompletion() {
            @Override
            public void done() {
                // fail the flow if any vip failed to release its services
                if (errList.getCauses().size() > 0) {
                    trigger.fail(errList.getCauses().get(0));
                } else {
                    trigger.next();
                }
            }
        });
    }
}
/**
 * Flow that asks the virtual router agent to remove the nic's configuration
 * before the nic is detached on the management plane.
 *
 * Agent-side failures are logged and tolerated: a nic that cannot be removed
 * from the agent must not block the detach.
 */
public class virtualRouterbeforeDetachNic extends NoRollbackFlow {
    String __name__ = "virtualRouter-beforeDetachNic";

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC.toString());

        VirtualRouterCommands.RemoveNicCmd cmd = new VirtualRouterCommands.RemoveNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setIp(nicInventory.getIp());
        info.setDefaultRoute(false);
        info.setGateway(nicInventory.getGateway());
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        cmd.setNics(Arrays.asList(info));

        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_REMOVE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                // every failure path still calls trigger.next(): removal is best-effort
                if (!reply.isSuccess()) {
                    logger.warn(String.format("detach nic[%s] from virtual router vm[uuid:%s, ip:%s] failed because %s",
                            info, vr.getUuid(), vr.getManagementNic().getIp(), reply.getError().getDetails()));
                    trigger.next();
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.RemoveNicRsp rsp = re.toResponse(VirtualRouterCommands.RemoveNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully detach nic[%s] from virtual router vm[uuid:%s, ip:%s]",info, vr.getUuid(), vr.getManagementNic()
                            .getIp()));
                    trigger.next();
                } else {
                    logger.warn(String.format("unable to detach nic[%s] from virtual router vm[uuid:%s ip:%s], because %s",
                            info, vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError()));
                    // fix: dropped the stray empty statement ("trigger.next();;")
                    trigger.next();
                }
            }
        });
    }
}
// Flow that asks every VirtualRouterBeforeDetachNicExtensionPoint to release the
// services it applied for the nic before the nic is detached. Extension failures
// are deliberately tolerated so a broken service cannot block the detach.
private class virtualRouterReleaseServicesbeforeDetachNicFlow implements Flow {
    String __name__ = "virtualRouter-release-services-before-detach-nic";

    // Walks the extension list asynchronously, one extension at a time.
    private void virtualRouterReleaseServices(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        logger.debug(String.format("virtual router release service before detach l3 network for %s", ext.getClass().getSimpleName()));
        ext.beforeDetachNic(nicInv, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterReleaseServices(it, nicInv, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                /* even failed, continue the release process */
                virtualRouterReleaseServices(it, nicInv, completion);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServices(it, nicInv, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                // unreachable in practice: virtualRouterReleaseServices never fails,
                // but kept for the Completion contract
                trigger.fail(errorCode);
            }
        });
    }

    // Rollback counterpart: gives every extension a chance to undo its release.
    private void virtualRouterReleaseServicesRollback(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, NoErrorCompletion completion) {
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        ext.beforeDetachNicRollback(nicInv, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                virtualRouterReleaseServicesRollback(it, nicInv, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServicesRollback(it, nicInv, new NoErrorCompletion(trigger) {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
/**
 * Runs the release workflow before a nic is detached from the virtual router:
 * releases extension services, releases vip services, then removes the nic
 * configuration from the agent.
 *
 * Skips the backend work entirely when the appliance VM is disconnected or
 * stopped — there is no reachable agent to talk to.
 *
 * @param nicInventory the nic about to be detached
 * @param completion   called when the workflow finishes
 */
@Override
protected void beforeDetachNic(VmNicInventory nicInventory, Completion completion) {
    Map<String, Object> data = new HashMap<>();
    data.put(Param.VR_NIC.toString(), nicInventory);
    data.put(Param.VR.toString(), vr);

    ApplianceVmVO appvm = Q.New(ApplianceVmVO.class)
            .eq(ApplianceVmVO_.uuid, nicInventory.getVmInstanceUuid()).find();
    if (appvm.getStatus().equals(ApplianceVmStatus.Disconnected)) {
        logger.debug(String.format("appliance vm[uuid: %s] current status is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getStatus()));
        completion.success();
        return;
    }

    if (appvm.getState().equals(VmInstanceState.Stopped)) {
        // fix: log the VM state (the value the condition tested), not its status
        logger.debug(String.format("appliance vm[uuid: %s] current state is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getState()));
        completion.success();
        return;
    }

    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("release-services-before-detach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
    chain.setData(data);
    chain.insert(new virtualRouterReleaseServicesbeforeDetachNicFlow());
    chain.then(new virtualRouterReleaseVipbeforeDetachNic());
    chain.then(new virtualRouterbeforeDetachNic());
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
}
|
plugin/virtualRouterProvider/src/main/java/org/zstack/network/service/virtualrouter/VirtualRouter.java
|
package org.zstack.network.service.virtualrouter;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.appliancevm.*;
import org.zstack.appliancevm.ApplianceVmConstant.Params;
import org.zstack.core.asyncbatch.While;
import org.zstack.core.cloudbus.CloudBusCallBack;
import org.zstack.core.db.Q;
import org.zstack.core.db.SQL;
import org.zstack.core.db.SimpleQuery;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.thread.ChainTask;
import org.zstack.core.thread.SyncTaskChain;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.header.core.Completion;
import org.zstack.header.core.NoErrorCompletion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.ErrorCodeList;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.errorcode.SysErrors;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l2.L2NetworkGetVniExtensionPoint;
import org.zstack.header.network.l2.L2NetworkVO;
import org.zstack.header.network.l2.L2NetworkVO_;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.*;
import org.zstack.header.rest.JsonAsyncRESTCallback;
import org.zstack.header.rest.RESTFacade;
import org.zstack.header.vm.*;
import org.zstack.network.service.MtuGetter;
import org.zstack.network.service.vip.*;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingCmd;
import org.zstack.network.service.virtualrouter.VirtualRouterCommands.PingRsp;
import org.zstack.network.service.virtualrouter.VirtualRouterConstant.Param;
import org.zstack.network.service.virtualrouter.ha.VirtualRouterHaBackend;
import org.zstack.network.service.virtualrouter.vip.VirtualRouterCreatePublicVipFlow;
import org.zstack.utils.Utils;
import org.zstack.utils.logging.CLogger;
import org.zstack.utils.network.IPv6Constants;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static org.zstack.core.Platform.*;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.ADDITIONAL_PUBLIC_NIC_MASK;
import static org.zstack.network.service.virtualrouter.VirtualRouterNicMetaData.GUEST_NIC_MASK;
/**
*/
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
public class VirtualRouter extends ApplianceVmBase {
private static final CLogger logger = Utils.getLogger(VirtualRouter.class);
static {
    // Operations that may only be issued while the virtual router VM is running.
    allowedOperations.addState(VmInstanceState.Running, APIReconnectVirtualRouterMsg.class.getName());
    allowedOperations.addState(VmInstanceState.Running, APIUpdateVirtualRouterMsg.class.getName());
    allowedOperations.addState(VmInstanceState.Running, ReconnectVirtualRouterVmMsg.class.getName());
}
@Autowired
protected VirtualRouterManager vrMgr;
@Autowired
protected RESTFacade restf;
@Autowired
protected VirtualRouterHaBackend haBackend;
@Autowired
protected VirutalRouterDefaultL3ConfigProxy defaultL3ConfigProxy;

// Inventory view of this virtual router; populated only by the
// VirtualRouterVmVO constructor.
// NOTE(review): the ApplianceVmVO constructor leaves vr null — confirm that
// code paths reached through that constructor never dereference vr.
protected VirtualRouterVmInventory vr;

public VirtualRouter(ApplianceVmVO vo) {
    super(vo);
}

public VirtualRouter(VirtualRouterVmVO vo) {
    super(vo);
    vr = new VirtualRouterVmInventory(vo);
}
@Override
protected VmInstanceInventory getSelfInventory() {
    // Expose the virtual-router-specific inventory instead of the generic
    // appliance VM one.
    return VirtualRouterVmInventory.valueOf(getSelf());
}

// The lifecycle flows below are all supplied by the VirtualRouterManager so
// that every virtual router instance shares the same workflow definitions.

@Override
protected List<Flow> getPostCreateFlows() {
    return vrMgr.getPostCreateFlows();
}

@Override
protected List<Flow> getPostStartFlows() {
    return vrMgr.getPostStartFlows();
}

@Override
protected List<Flow> getPostStopFlows() {
    return vrMgr.getPostStopFlows();
}

@Override
protected List<Flow> getPostRebootFlows() {
    return vrMgr.getPostRebootFlows();
}

@Override
protected List<Flow> getPostDestroyFlows() {
    return vrMgr.getPostDestroyFlows();
}

@Override
protected List<Flow> getPostMigrateFlows() {
    return vrMgr.getPostMigrateFlows();
}

// Chain executed when the router needs to be reconnected (see reconnect()).
protected FlowChain getReconnectChain() {
    return vrMgr.getReconnectFlowChain();
}
@Override
protected void handleApiMessage(APIMessage msg) {
    // Dispatch the virtual-router specific API messages; everything else falls
    // through to the generic appliance VM handling.
    if (msg instanceof APIReconnectVirtualRouterMsg) {
        handle((APIReconnectVirtualRouterMsg) msg);
    } else if (msg instanceof APIUpdateVirtualRouterMsg) {
        handle((APIUpdateVirtualRouterMsg) msg);
    } else {
        super.handleApiMessage(msg);
    }
}
@Override
protected void handleLocalMessage(Message msg) {
    // Dispatch the virtual-router specific internal messages; everything else
    // falls through to the generic appliance VM handling.
    if (msg instanceof VirtualRouterAsyncHttpCallMsg) {
        handle((VirtualRouterAsyncHttpCallMsg) msg);
    } else if (msg instanceof ReconnectVirtualRouterVmMsg) {
        handle((ReconnectVirtualRouterVmMsg) msg);
    } else if (msg instanceof PingVirtualRouterVmMsg) {
        handle((PingVirtualRouterVmMsg) msg);
    } else {
        super.handleLocalMessage(msg);
    }
}
/**
 * Pings the virtual router agent over HTTP to decide whether the router is
 * still connected and whether a reconnect should be scheduled.
 *
 * Serialized on the VM's sync thread so a ping never races other operations on
 * the same router.
 */
private void handle(final PingVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final PingVirtualRouterVmReply reply = new PingVirtualRouterVmReply();
            // not running, or still connecting: don't ping and don't trigger a reconnect
            if (VmInstanceState.Running != self.getState() || ApplianceVmStatus.Connecting == getSelf().getStatus()) {
                reply.setDoReconnect(false);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            PingCmd cmd = new PingCmd();
            cmd.setUuid(self.getUuid());
            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), VirtualRouterConstant.VR_PING), cmd, new JsonAsyncRESTCallback<PingRsp>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setDoReconnect(true);
                    reply.setConnected(false);
                    // fix: log the actual failure; reply.getError() was never set
                    // on this path and always printed null
                    logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), err));
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(PingRsp ret) {
                    reply.setDoReconnect(true);
                    if (!ret.isSuccess()) {
                        logger.warn(String.format("failed to ping the virtual router vm[uuid:%s], %s. We will reconnect it soon", self.getUuid(), ret.getError()));
                        reply.setConnected(false);
                    } else {
                        // a uuid mismatch means the agent restarted and lost its identity
                        boolean connected = self.getUuid().equals(ret.getUuid());
                        if (!connected) {
                            logger.warn(String.format("a signature lost on the virtual router vm[uuid:%s] changed, it's probably caused by the agent restart. We will issue a reconnect soon", self.getUuid()));
                        } else {
                            connected = ApplianceVmStatus.Connected == getSelf().getStatus();
                        }

                        reply.setConnected(connected);
                        reply.setHaStatus(ret.getHaStatus());

                        // surface agent-reported health through canonical events
                        if ((ret.getHealthy() != null) && (!ret.getHealthy()) && (ret.getHealthDetail() != null)) {
                            fireServiceUnhealthyCanonicalEvent(inerr("virtual router %s unhealthy, detail %s", getSelf().getUuid(), ret.getHealthDetail()));
                        } else {
                            fireServicehealthyCanonicalEvent();
                        }
                    }

                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<PingRsp> getReturnClass() {
                    return PingRsp.class;
                }
            }, TimeUnit.MINUTES, 1);
        }

        @Override
        public String getName() {
            return "ping-virtual-router";
        }
    });
}
// Internal-message variant of reconnect; serialized on the VM's sync thread.
private void handle(final ReconnectVirtualRouterVmMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final ReconnectVirtualRouterVmReply reply = new ReconnectVirtualRouterVmReply();
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                // the reconnect is rejected by the current VM state; optionally
                // mark the appliance disconnected so its status reflects reality
                if (msg.isStatusChange()) {
                    changeApplianceVmStatus(ApplianceVmStatus.Disconnected);
                }

                reply.setError(allowed);
                bus.reply(msg, reply);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    reply.setError(errorCode);
                    bus.reply(msg, reply);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}
// Builds the agent HTTP URL for the given management IP and command path;
// delegates to the manager so all routers share one URL scheme.
protected String buildUrl(String mgmtIp, String path) {
    return vrMgr.buildUrl(mgmtIp, path);
}
// Forwards an arbitrary command to the virtual router agent over HTTP. Runs on
// a dedicated "<syncThreadName>-commands" queue whose parallelism is controlled
// by the manager, so multiple commands may be in flight at once.
private void handle(final VirtualRouterAsyncHttpCallMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return String.format("%s-commands", syncThreadName);
        }

        @Override
        public void run(final SyncTaskChain chain) {
            refreshVO();
            final VirtualRouterAsyncHttpCallReply reply = new VirtualRouterAsyncHttpCallReply();
            // Preconditions when the caller asked for a status check: the VM must be
            // running and the agent connected.
            // NOTE(review): these throws happen inside the chain task's run(); this
            // relies on the thread facade turning the exception into a failed reply
            // and advancing the chain — confirm against the framework's contract.
            if (msg.isCheckStatus() && getSelf().getState() != VmInstanceState.Running) {
                throw new OperationFailureException(operr("the virtual router[name:%s, uuid:%s, current state:%s] is not running," +
                        "and cannot perform required operation. Please retry your operation later once it is running", self.getName(), self.getUuid(), self.getState()));
            }

            if (msg.isCheckStatus() && getSelf().getStatus() != ApplianceVmStatus.Connected) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] is in status of %s that cannot make http call to %s",
                        self.getUuid(), getSelf().getStatus(), msg.getPath()));
            }

            if (vr.getManagementNic() == null) {
                throw new OperationFailureException(operr("virtual router[uuid:%s] has no management nic that cannot make http call to %s",
                        self.getUuid(), msg.getPath()));
            }

            restf.asyncJsonPost(buildUrl(vr.getManagementNic().getIp(), msg.getPath()), msg.getCommand(), new JsonAsyncRESTCallback<LinkedHashMap>(msg, chain) {
                @Override
                public void fail(ErrorCode err) {
                    reply.setError(err);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public void success(LinkedHashMap ret) {
                    // the raw JSON map is handed back to the caller untyped
                    reply.setResponse(ret);
                    bus.reply(msg, reply);
                    chain.next();
                }

                @Override
                public Class<LinkedHashMap> getReturnClass() {
                    return LinkedHashMap.class;
                }
            });
        }

        @Override
        protected int getSyncLevel() {
            // allow several concurrent agent commands per router
            return vrMgr.getParallelismDegree(self.getUuid());
        }

        @Override
        public String getName() {
            return getSyncSignature();
        }
    });
}
// API entry for updating the virtual router (currently: changing its default
// route L3 network). Serialized on the VM's sync thread.
private void handle(final APIUpdateVirtualRouterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final APIUpdateVirtualRouterEvent evt = new APIUpdateVirtualRouterEvent(msg.getId());
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                evt.setError(allowed);
                bus.publish(evt);
                chain.next();
                return;
            }

            updateVirutalRouter(msg, new Completion(msg, chain) {
                @Override
                public void success() {
                    // reload from the database so the event carries the updated inventory
                    VirtualRouterVmVO vrVO = dbf.findByUuid(msg.getVmInstanceUuid(), VirtualRouterVmVO.class);
                    evt.setInventory((VirtualRouterVmInventory.valueOf(vrVO)));
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("update-virtual-router-%s", self.getUuid());
        }
    });
}
/**
 * Atomically swaps the "default route network" service reference of the given
 * virtual router from the old L3 network to the new one.
 *
 * @param vrUuid    uuid of the virtual router VM
 * @param oldL3Uuid uuid of the L3 network currently acting as the default route network
 * @param newL3Uuid uuid of the L3 network that becomes the default route network
 */
@Transactional
protected void replaceVirtualRouterDefaultNetwork(String vrUuid, String oldL3Uuid, String newL3Uuid) {
    defaultL3ConfigProxy.detachNetworkService(vrUuid, VirtualRouterConstant.VR_DEFAULT_ROUTE_NETWORK,
            Collections.singletonList(oldL3Uuid));
    defaultL3ConfigProxy.attachNetworkService(vrUuid, VirtualRouterConstant.VR_DEFAULT_ROUTE_NETWORK,
            Collections.singletonList(newL3Uuid));
}
/**
 * Changes the router's default route L3 network: updates the database record,
 * moves the SNAT binding from the old default network's vip to the new one,
 * reconfigures the backend, and finally propagates the change to the HA peer.
 *
 * (Method name keeps the historical "Virutal" typo because it is part of the
 * class's internal call surface.)
 *
 * @param msg        carries the target router uuid and the new default route L3 uuid
 * @param completion called when the whole flow chain finishes
 */
private void updateVirutalRouter(APIUpdateVirtualRouterMsg msg, final Completion completion) {
    VirtualRouterVmVO vrVO = dbf.findByUuid(msg.getVmInstanceUuid(), VirtualRouterVmVO.class);

    FlowChain fchain = FlowChainBuilder.newSimpleFlowChain();
    fchain.setName(String.format("update-virtual-router-%s", msg.getVmInstanceUuid()));
    fchain.then(new Flow() {
        String __name__ = "update-virtual-router-db";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            replaceVirtualRouterDefaultNetwork(msg.getVmInstanceUuid(), vrVO.getDefaultRouteL3NetworkUuid(),
                    msg.getDefaultRouteL3NetworkUuid());
            trigger.next();
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            // swap back to the original default route network
            replaceVirtualRouterDefaultNetwork(msg.getVmInstanceUuid(), msg.getDefaultRouteL3NetworkUuid(),
                    vrVO.getDefaultRouteL3NetworkUuid());
            trigger.rollback();
        }
    }).then(new Flow() {
        String __name__ = "release-old-snat-of-vip";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            // find the router nic sitting on the old default route network
            VmNicVO oldNic = null;
            for (VmNicVO nic: vrVO.getVmNics()) {
                if (nic.getL3NetworkUuid().equals(vrVO.getDefaultRouteL3NetworkUuid())) {
                    oldNic = nic;
                    break;
                }
            }

            if (oldNic == null) {
                trigger.next();
                return;
            }

            String vipIp = oldNic.getIp();
            // when the default route runs over the management network, the SNAT
            // vip lives on the SNAT public nic instead
            if (vrVO.getDefaultRouteL3NetworkUuid().equals(vrVO.getManagementNetworkUuid())) {
                VmNicInventory publicNic = vrMgr.getSnatPubicInventory(VirtualRouterVmInventory.valueOf(vrVO));
                vipIp = publicNic.getIp();
            }

            VipVO vipVO = Q.New(VipVO.class).eq(VipVO_.ip, vipIp)
                    .eq(VipVO_.l3NetworkUuid, oldNic.getL3NetworkUuid()).find();
            if (vipVO == null) {
                trigger.next();
                return;
            }

            // remember the vip so the rollback can re-acquire it
            data.put("oldVip", vipVO);
            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.release(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            VipVO vipVO = (VipVO) data.get("oldVip");
            if (vipVO == null) {
                trigger.rollback();
                return;
            }

            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            // best-effort re-acquire: the rollback continues either way
            vip.acquire(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.rollback();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.rollback();
                }
            });
        }
    }).then(new Flow() {
        String __name__ = "apply-new-snat-of-vip";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            // find the router nic sitting on the new default route network
            VmNicVO newNic = null;
            for (VmNicVO nic: vrVO.getVmNics()) {
                if (nic.getL3NetworkUuid().equals(msg.getDefaultRouteL3NetworkUuid())) {
                    newNic = nic;
                    break;
                }
            }

            if (newNic == null) {
                // fix: format string previously read "[uuid:s]" and never rendered the uuid
                trigger.fail(argerr("virtual router [uuid:%s] does not have nic in l3 network [uuid:%s]", vrVO.getUuid(),
                        msg.getDefaultRouteL3NetworkUuid()));
                return;
            }

            String vipIp = newNic.getIp();
            if (msg.getDefaultRouteL3NetworkUuid().equals(vrVO.getManagementNetworkUuid())) {
                // evaluate the SNAT public nic against the NEW default route network
                VirtualRouterVmInventory vrInv = VirtualRouterVmInventory.valueOf(vrVO);
                vrInv.setDefaultRouteL3NetworkUuid(msg.getDefaultRouteL3NetworkUuid());
                VmNicInventory publicNic = vrMgr.getSnatPubicInventory(vrInv);
                vipIp = publicNic.getIp();
            }

            VipVO vipVO = Q.New(VipVO.class).eq(VipVO_.ip, vipIp)
                    .eq(VipVO_.l3NetworkUuid, newNic.getL3NetworkUuid()).find();
            if (vipVO == null) {
                trigger.fail(argerr("there is no vip [ip:%s] in l3 network [uuid:%s]", vipIp,
                        msg.getDefaultRouteL3NetworkUuid()));
                return;
            }

            // remember the vip so the rollback can release it again
            data.put("newVip", vipVO);
            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            vip.acquire(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            VipVO vipVO = (VipVO) data.get("newVip");
            if (vipVO == null) {
                trigger.rollback();
                return;
            }

            ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
            struct.setUseFor(NetworkServiceType.SNAT.toString());
            struct.setServiceUuid(vipVO.getUuid());
            Vip vip = new Vip(vipVO.getUuid());
            vip.setStruct(struct);
            // best-effort release: the rollback continues either way
            vip.release(new Completion(trigger) {
                @Override
                public void success() {
                    trigger.rollback();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.rollback();
                }
            });
        }
    }).then(new NoRollbackFlow() {
        String __name__ = "update-virtual-router-backend";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            vrMgr.changeVirutalRouterDefaultL3Network(msg.getVmInstanceUuid(), msg.getDefaultRouteL3NetworkUuid(), vrVO.getDefaultRouteL3NetworkUuid(), new Completion(trigger) {
                @Override
                public void success() {
                    trigger.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    trigger.fail(errorCode);
                }
            });
        }
    }).done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // propagate the default-route change to the HA peer router
            Map<String, Object> haData = new HashMap<>();
            haData.put(VirtualRouterHaCallbackInterface.Params.TaskName.toString(), VirtualRouterConstant.VR_CHANGE_DEFAULT_ROUTE_JOB);
            haData.put(VirtualRouterHaCallbackInterface.Params.OriginRouterUuid.toString(), msg.getVmInstanceUuid());
            haData.put(VirtualRouterHaCallbackInterface.Params.Struct.toString(), msg.getDefaultRouteL3NetworkUuid());
            haData.put(VirtualRouterHaCallbackInterface.Params.Struct1.toString(), vrVO.getDefaultRouteL3NetworkUuid());
            haBackend.submitVirutalRouterHaTask(haData, completion);
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
// API entry for reconnecting the virtual router; serialized on the VM's sync
// thread and answered through an API event.
private void handle(final APIReconnectVirtualRouterMsg msg) {
    thdf.chainSubmit(new ChainTask(msg) {
        @Override
        public String getSyncSignature() {
            return syncThreadName;
        }

        @Override
        public void run(final SyncTaskChain chain) {
            final APIReconnectVirtualRouterEvent evt = new APIReconnectVirtualRouterEvent(msg.getId());
            refreshVO();
            ErrorCode allowed = validateOperationByState(msg, self.getState(), SysErrors.OPERATION_ERROR);
            if (allowed != null) {
                evt.setError(allowed);
                bus.publish(evt);
                chain.next();
                return;
            }

            reconnect(new Completion(msg, chain) {
                @Override
                public void success() {
                    evt.setInventory((ApplianceVmInventory) getSelfInventory());
                    bus.publish(evt);
                    chain.next();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    evt.setError(errorCode);
                    bus.publish(evt);
                    chain.next();
                }
            });
        }

        @Override
        public String getName() {
            return String.format("reconnect-virtual-router-%s", self.getUuid());
        }
    });
}
/**
 * Runs the reconnect flow chain for this virtual router.
 *
 * Status transitions: Connecting while the chain runs, Connected on success,
 * Disconnected (via the inserted flow's rollback) on failure. A "disconnected"
 * canonical event is fired on failure only if the router was previously
 * Connected, or via the rollback path for mid-chain failures.
 *
 * @param completion invoked with success/fail when the chain finishes
 */
private void reconnect(final Completion completion) {
    // Captured before the chain runs so the error handler can tell whether the
    // router actually transitioned away from Connected.
    ApplianceVmStatus oldStatus = getSelf().getStatus();

    FlowChain chain = getReconnectChain();
    chain.setName(String.format("reconnect-virtual-router-%s", self.getUuid()));
    chain.getData().put(VirtualRouterConstant.Param.VR.toString(), vr);
    chain.getData().put(Param.IS_RECONNECT.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.isReconnect.toString(), Boolean.TRUE.toString());
    chain.getData().put(Params.managementNicIp.toString(), vr.getManagementNic().getIp());
    chain.getData().put(Params.applianceVmUuid.toString(), self.getUuid());

    // Hand the currently configured firewall rules to the chain so they can be
    // re-applied on the reconnected appliance.
    SimpleQuery<ApplianceVmFirewallRuleVO> q = dbf.createQuery(ApplianceVmFirewallRuleVO.class);
    q.add(ApplianceVmFirewallRuleVO_.applianceVmUuid, Op.EQ, getSelf().getUuid());
    List<ApplianceVmFirewallRuleVO> vos = q.list();
    List<ApplianceVmFirewallRuleInventory> rules = ApplianceVmFirewallRuleInventory.valueOf(vos);
    chain.getData().put(ApplianceVmConstant.Params.applianceVmFirewallRules.toString(), rules);

    chain.insert(new Flow() {
        String __name__ = "change-appliancevm-status-to-connecting";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connecting);
            trigger.next();
        }

        @Override
        public void rollback(FlowRollback trigger, Map data) {
            // Any failure later in the chain unwinds through here: mark the
            // router Disconnected and broadcast the canonical event.
            changeApplianceVmStatus(ApplianceVmStatus.Disconnected);
            fireDisconnectedCanonicalEvent(operr("appliance vm %s reconnect failed",
                    getSelf().getUuid()));
            trigger.rollback();
        }
    }).then(new NoRollbackFlow() {
        String __name__ = "change-appliancevm-status-to-connected";

        @Override
        public void run(FlowTrigger trigger, Map data) {
            changeApplianceVmStatus(ApplianceVmStatus.Connected);
            trigger.next();
        }
    }).done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            // Reload to pick up the status written by the flows above.
            self = dbf.reload(self);
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            if (oldStatus == ApplianceVmStatus.Connected) {
                fireDisconnectedCanonicalEvent(errCode);
            }
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Flow that pushes a newly attached nic's configuration to the virtual router
 * agent via the VR_CONFIGURE_NIC_PATH HTTP command.
 *
 * Skipped entirely when APPLY_TO_VIRTUALROUTER is false (database-only change).
 * NoRollbackFlow: a failed configure has nothing to undo here.
 */
public class virtualRouterAfterAttachNicFlow extends NoRollbackFlow {
    @Override
    public void run(FlowTrigger trigger, Map data) {
        boolean applyToVirtualRouter = (boolean)data.get(Param.APPLY_TO_VIRTUALROUTER.toString());
        if (!applyToVirtualRouter) {
            trigger.next();
            return;
        }

        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC.toString());
        L3NetworkVO l3NetworkVO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, nicInventory.getL3NetworkUuid()).find();

        VirtualRouterCommands.ConfigureNicCmd cmd = new VirtualRouterCommands.ConfigureNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setDefaultRoute(false);
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        // A nic can carry both an IPv4 and an IPv6 address; populate the
        // version-specific fields of the single NicInfo accordingly.
        for (UsedIpInventory ip : nicInventory.getUsedIps()) {
            if (ip.getIpVersion() == IPv6Constants.IPv4) {
                info.setIp(ip.getIp());
                info.setGateway(ip.getGateway());
                info.setNetmask(ip.getNetmask());
            } else {
                info.setIp6(ip.getIp());
                info.setGateway6(ip.getGateway());
                // IPv6 needs the prefix length and address mode from the ip range.
                NormalIpRangeVO ipr = Q.New(NormalIpRangeVO.class).eq(NormalIpRangeVO_.uuid, ip.getIpRangeUuid()).find();
                info.setPrefixLength(ipr.getPrefixLen());
                info.setAddressMode(ipr.getAddressMode());
            }
        }

        L2NetworkVO l2NetworkVO = Q.New(L2NetworkVO.class).eq(L2NetworkVO_.uuid, l3NetworkVO.getL2NetworkUuid()).find();
        info.setCategory(l3NetworkVO.getCategory().toString());
        info.setL2type(l2NetworkVO.getType());
        info.setPhysicalInterface(l2NetworkVO.getPhysicalInterface());
        // Ask the plugin registered for this L2 type for a vni, if one applies.
        for (L2NetworkGetVniExtensionPoint ext : pluginRgty.getExtensionList(L2NetworkGetVniExtensionPoint.class)) {
            if (ext.getL2NetworkVniType().equals(l2NetworkVO.getType())) {
                info.setVni(ext.getL2NetworkVni(l2NetworkVO.getUuid(), vr.getHostUuid()));
            }
        }
        info.setMtu(new MtuGetter().getMtu(l3NetworkVO.getUuid()));
        cmd.setNics(Arrays.asList(info));

        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_CONFIGURE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    trigger.fail(reply.getError());
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.ConfigureNicRsp rsp = re.toResponse(VirtualRouterCommands.ConfigureNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s, ip:%s]",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp()));
                    trigger.next();
                } else {
                    // Agent reachable but refused the config: fail the flow with its error.
                    ErrorCode err = operr("unable to add nic[ip:%s, mac:%s] to virtual router vm[uuid:%s ip:%s], because %s",
                            info.getIp(), info.getMac(), vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError());
                    trigger.fail(err);
                }
            }
        });
    }
}
/**
 * Flow that runs every VirtualRouterAfterAttachNicExtensionPoint sequentially
 * after a nic is attached, and unwinds them (rollback variant) on failure.
 */
private class virtualRouterApplyServicesAfterAttachNicFlow implements Flow {
    String __name__ = "virtualRouter-apply-services-afterAttachNic";

    // Recursively drives the extension-point chain one extension at a time.
    // Succeeds when the iterator is drained; the first extension failure
    // aborts the remaining extensions.
    private void virtualRouterApplyServicesAfterAttachNic(Iterator<VirtualRouterAfterAttachNicExtensionPoint> it, VmNicInventory nicInv, Completion completion){
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint ext = it.next();
        logger.debug(String.format("execute afterAttachNic extension %s", ext.getClass().getSimpleName()));
        ext.afterAttachNic(nicInv, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterApplyServicesAfterAttachNic(it, nicInv, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        boolean applyToVirtualRouter = (boolean)data.get(Param.APPLY_TO_VIRTUALROUTER.toString());
        if (!applyToVirtualRouter) {
            // Database-only change; no services to apply on the backend.
            trigger.next();
            return;
        }

        if (nicInv.isIpv6OnlyNic()) {
            // NOTE(review): IPv6-only nics skip the extension chain entirely —
            // presumably these services only apply to IPv4; confirm with the extensions.
            trigger.next();
            return;
        }

        Iterator<VirtualRouterAfterAttachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator();
        virtualRouterApplyServicesAfterAttachNic(it, nicInv, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }

    // Rollback mirror of the forward chain: gives every extension a chance to
    // undo its work; rollback callbacks cannot fail (NoErrorCompletion).
    private void virtualRouterApplyServicesAfterAttachNicRollback(Iterator<VirtualRouterAfterAttachNicExtensionPoint> it, VmNicInventory nicInv, NoErrorCompletion completion){
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterAfterAttachNicExtensionPoint ext = it.next();
        ext.afterAttachNicRollback(nicInv, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                virtualRouterApplyServicesAfterAttachNicRollback(it, nicInv, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        Iterator<VirtualRouterAfterAttachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterAfterAttachNicExtensionPoint.class).iterator();
        virtualRouterApplyServicesAfterAttachNicRollback(it, nicInv, new NoErrorCompletion() {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
@Override
protected void afterAttachNic(VmNicInventory nicInventory, Completion completion) {
    // Delegate to the superclass three-arg variant, always applying the change
    // to the router backend (applyToBackend = true).
    super.afterAttachNic(nicInventory, true, completion);
}
/**
 * Post-attach handling for a router nic: tags the nic (and, for private
 * networks, its used-ip record) with role metadata, then runs a flow chain
 * that configures the nic, creates the public VIP, applies services, and
 * updates the HA group.
 *
 * @param applyToBackend when false, flows that talk to the router agent are skipped
 */
@Override
protected void afterAttachNic(VmNicInventory nicInventory, boolean applyToBackend, Completion completion) {
    VmNicVO vo = Q.New(VmNicVO.class).eq(VmNicVO_.uuid, nicInventory.getUuid()).find();

    L3NetworkVO l3NetworkVO = Q.New(L3NetworkVO.class).eq(L3NetworkVO_.uuid, vo.getL3NetworkUuid()).find();
    if (l3NetworkVO.getCategory().equals(L3NetworkCategory.Private)) {
        // Private network: mark both the nic and its used ip as a guest nic.
        vo.setMetaData(GUEST_NIC_MASK.toString());
        UsedIpVO usedIpVO = Q.New(UsedIpVO.class).eq(UsedIpVO_.uuid, nicInventory.getUsedIpUuid()).find();
        usedIpVO.setMetaData(GUEST_NIC_MASK.toString());
        dbf.updateAndRefresh(usedIpVO);
    } else {
        vo.setMetaData(ADDITIONAL_PUBLIC_NIC_MASK.toString());
    }
    vo = dbf.updateAndRefresh(vo);
    logger.debug(String.format("updated metadata of vmnic[uuid: %s]", vo.getUuid()));

    VirtualRouterVmVO vrVo = dbf.findByUuid(self.getUuid(), VirtualRouterVmVO.class);

    Map<String, Object> data = new HashMap<String, Object>();
    data.put(Param.VR_NIC.toString(), VmNicInventory.valueOf(vo));
    data.put(Param.SNAT.toString(), Boolean.FALSE);
    data.put(Param.VR.toString(), VirtualRouterVmInventory.valueOf(vrVo));
    data.put(Param.APPLY_TO_VIRTUALROUTER.toString(), applyToBackend);

    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("apply-services-after-attach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
    chain.setData(data);
    chain.then(new virtualRouterAfterAttachNicFlow());
    chain.then(new VirtualRouterCreatePublicVipFlow());
    chain.then(new virtualRouterApplyServicesAfterAttachNicFlow());
    chain.then(haBackend.getAttachL3NetworkFlow());
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
/**
 * Recursively runs every VirtualRouterAfterDetachNicExtensionPoint for a
 * detached nic. Note that the fail() branch also continues with the next
 * extension, so the chain is best-effort: all extensions run regardless of
 * individual failures, and the completion always ends in success().
 */
private void virtualRouterAfterDetachNic(Iterator<VirtualRouterAfterDetachNicExtensionPoint> exts, VmNicInventory nicInventory, Completion completion) {
    if (!exts.hasNext()) {
        completion.success();
        return;
    }

    VirtualRouterAfterDetachNicExtensionPoint ext = exts.next();
    ext.afterDetachVirtualRouterNic(nicInventory, new Completion(completion) {
        @Override
        public void success() {
            virtualRouterAfterDetachNic(exts, nicInventory, completion);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            // Even on failure, continue with the remaining extensions (best-effort cleanup).
            virtualRouterAfterDetachNic(exts, nicInventory, completion);
        }
    });
}
/**
 * Post-detach hook: runs the after-detach extension chain, then removes the
 * L3 network from the virtual router HA group. Skipped entirely when the
 * detach is itself a rollback of a failed attach.
 */
@Override
protected void afterDetachNic(VmNicInventory nicInventory, boolean isRollback, Completion completion) {
    if (isRollback) {
        completion.success();
        return;
    }

    List<VirtualRouterAfterDetachNicExtensionPoint> exts = pluginRgty.getExtensionList(VirtualRouterAfterDetachNicExtensionPoint.class);
    virtualRouterAfterDetachNic(exts.iterator(), nicInventory, new Completion(completion) {
        @Override
        public void success() {
            haBackend.detachL3NetworkFromVirtualRouterHaGroup(nicInventory.getVmInstanceUuid(),
                    nicInventory.getL3NetworkUuid(), isRollback, completion);
        }

        @Override
        public void fail(ErrorCode errorCode) {
            completion.fail(errorCode);
        }
    });
}
/**
 * Flow that releases all VIP-bound services (e.g. port forwarding, EIP) whose
 * VIPs peer with the L3 network of the nic being detached from a guest network.
 *
 * NOTE(review): __name__ is "virtualRouter-beforeDetachNic", the same string
 * as the virtualRouterbeforeDetachNic flow below — the two are indistinguishable
 * in flow logs; consider renaming.
 */
@Configurable(preConstruction = true, autowire = Autowire.BY_TYPE)
private static class virtualRouterReleaseVipbeforeDetachNic extends NoRollbackFlow {
    @Autowired
    private VipManager vipMgr;

    String __name__ = "virtualRouter-beforeDetachNic";

    // Releases the VIP's services one by one; failures are ignored so every
    // service gets a release attempt (best-effort).
    private void virtualRouterReleaseVipServices(Iterator<String> it, VipInventory vip, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        String service = it.next();
        VipReleaseExtensionPoint ext = vipMgr.getVipReleaseExtensionPoint(service);
        ext.releaseServicesOnVip(vip, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterReleaseVipServices(it, vip, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                /* even failed, continue release */
                virtualRouterReleaseVipServices(it, vip, completion);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nic = (VmNicInventory) data.get(Param.VR_NIC.toString());
        // Only guest nics carry VIP-backed services; skip everything else.
        if (!VirtualRouterNicMetaData.GUEST_NIC_MASK_STRING_LIST.contains(nic.getMetaData())) {
            trigger.next();
            return;
        }

        // Dedicated-role routers are excluded from this generic release path.
        if (VirtualRouterSystemTags.DEDICATED_ROLE_VR.hasTag(nic.getVmInstanceUuid())) {
            trigger.next();
            return;
        }

        VirtualRouterVmInventory vr = VirtualRouterVmInventory.valueOf((VirtualRouterVmVO)
                Q.New(VirtualRouterVmVO.class).eq(VirtualRouterVmVO_.uuid, nic.getVmInstanceUuid()).find());

        /*
         * fixme: to be done, the active will not done in standby vpc router because
         * VipPeerL3NetworkRefVO record has been deleted during that of master router
         * this will result the vip delete action will not done in agent before detach nic and
         * not delete some configure such as vip QoS, ifbx.
         */
        // Find all VIPs on the nic's L3 network that peer with any of this router's networks.
        List<VipVO> vips = SQL.New("select distinct vip from VipVO vip, VipPeerL3NetworkRefVO ref " +
                "where ref.vipUuid = vip.uuid and ref.l3NetworkUuid in (:routerNetworks) " +
                "and vip.l3NetworkUuid = :l3Uuid")
                .param("l3Uuid", nic.getL3NetworkUuid())
                .param("routerNetworks", vr.getAllL3Networks())
                .list();
        if (vips.isEmpty()) {
            trigger.next();
            return;
        }

        // Release services on all VIPs in parallel; collect errors but only
        // fail the flow with the first one after every VIP has been attempted.
        ErrorCodeList errList = new ErrorCodeList();
        new While<>(vips).all((vip, completion) -> {
            Set<String> services = vip.getServicesTypes();
            if (services == null || services.isEmpty()) {
                completion.done();
                return;
            }

            virtualRouterReleaseVipServices(services.iterator(), VipInventory.valueOf(vip), new Completion(completion) {
                @Override
                public void success() {
                    completion.done();
                }

                @Override
                public void fail(ErrorCode errorCode) {
                    errList.getCauses().add(errorCode);
                    completion.done();
                }
            });
        }).run(new NoErrorCompletion() {
            @Override
            public void done() {
                if (errList.getCauses().size() > 0) {
                    trigger.fail(errList.getCauses().get(0));
                } else {
                    trigger.next();
                }
            }
        });
    }
}
/**
 * Flow that tells the virtual router agent to remove the nic's configuration
 * (VR_REMOVE_NIC_PATH) before the nic is detached from the VM.
 *
 * Deliberately best-effort: every failure path (bus error or agent error) is
 * logged and the flow proceeds with trigger.next(), because the nic is going
 * away regardless and a stale agent config must not block the detach.
 *
 * NOTE(review): only the IPv4 fields of NicInfo are populated here, unlike the
 * attach path which also fills IPv6 fields — confirm the agent does not need
 * them for removal.
 */
public class virtualRouterbeforeDetachNic extends NoRollbackFlow {
    String __name__ = "virtualRouter-beforeDetachNic";

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInventory = (VmNicInventory) data.get(Param.VR_NIC.toString());

        VirtualRouterCommands.RemoveNicCmd cmd = new VirtualRouterCommands.RemoveNicCmd();
        VirtualRouterCommands.NicInfo info = new VirtualRouterCommands.NicInfo();
        info.setIp(nicInventory.getIp());
        info.setDefaultRoute(false);
        info.setGateway(nicInventory.getGateway());
        info.setMac(nicInventory.getMac());
        info.setNetmask(nicInventory.getNetmask());
        cmd.setNics(Arrays.asList(info));

        VirtualRouterAsyncHttpCallMsg cmsg = new VirtualRouterAsyncHttpCallMsg();
        cmsg.setCommand(cmd);
        cmsg.setPath(VirtualRouterConstant.VR_REMOVE_NIC_PATH);
        cmsg.setVmInstanceUuid(vr.getUuid());
        cmsg.setCheckStatus(true);
        bus.makeTargetServiceIdByResourceUuid(cmsg, VmInstanceConstant.SERVICE_ID, vr.getUuid());
        bus.send(cmsg, new CloudBusCallBack(trigger) {
            @Override
            public void run(MessageReply reply) {
                if (!reply.isSuccess()) {
                    logger.warn(String.format("detach nic[%s] from virtual router vm[uuid:%s, ip:%s] failed because %s",
                            info, vr.getUuid(), vr.getManagementNic().getIp(), reply.getError().getDetails()));
                    trigger.next();
                    return;
                }

                VirtualRouterAsyncHttpCallReply re = reply.castReply();
                VirtualRouterCommands.RemoveNicRsp rsp = re.toResponse(VirtualRouterCommands.RemoveNicRsp.class);
                if (rsp.isSuccess()) {
                    logger.debug(String.format("successfully detach nic[%s] from virtual router vm[uuid:%s, ip:%s]", info, vr.getUuid(), vr.getManagementNic()
                            .getIp()));
                    trigger.next();
                } else {
                    logger.warn(String.format("unable to detach nic[%s] from virtual router vm[uuid:%s ip:%s], because %s",
                            info, vr.getUuid(), vr.getManagementNic().getIp(), rsp.getError()));
                    trigger.next(); // fixed: stray double semicolon removed
                }
            }
        });
    }
}
/**
 * Flow that lets every VirtualRouterBeforeDetachNicExtensionPoint release its
 * services before the nic is detached, with a matching rollback chain.
 */
private class virtualRouterReleaseServicesbeforeDetachNicFlow implements Flow {
    String __name__ = "virtualRouter-release-services-before-detach-nic";

    // Runs the extensions one at a time. Best-effort: a failing extension is
    // skipped and the chain continues, so the completion always succeeds.
    private void virtualRouterReleaseServices(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, Completion completion) {
        if (!it.hasNext()) {
            completion.success();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        logger.debug(String.format("virtual router release service before detach l3 network for %s", ext.getClass().getSimpleName()));
        ext.beforeDetachNic(nicInv, new Completion(completion) {
            @Override
            public void success() {
                virtualRouterReleaseServices(it, nicInv, completion);
            }

            @Override
            public void fail(ErrorCode errorCode) {
                /* even failed, continue the release process */
                virtualRouterReleaseServices(it, nicInv, completion);
            }
        });
    }

    @Override
    public void run(FlowTrigger trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServices(it, nicInv, new Completion(trigger) {
            @Override
            public void success() {
                trigger.next();
            }

            @Override
            public void fail(ErrorCode errorCode) {
                trigger.fail(errorCode);
            }
        });
    }

    // Rollback mirror: give each extension a chance to restore what it released.
    private void virtualRouterReleaseServicesRollback(final Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it, VmNicInventory nicInv, NoErrorCompletion completion) {
        if (!it.hasNext()) {
            completion.done();
            return;
        }

        VirtualRouterBeforeDetachNicExtensionPoint ext = it.next();
        ext.beforeDetachNicRollback(nicInv, new NoErrorCompletion(completion) {
            @Override
            public void done() {
                virtualRouterReleaseServicesRollback(it, nicInv, completion);
            }
        });
    }

    @Override
    public void rollback(FlowRollback trigger, Map data) {
        VmNicInventory nicInv = (VmNicInventory) data.get(Param.VR_NIC.toString());
        Iterator<VirtualRouterBeforeDetachNicExtensionPoint> it = pluginRgty.getExtensionList(VirtualRouterBeforeDetachNicExtensionPoint.class).iterator();
        virtualRouterReleaseServicesRollback(it, nicInv, new NoErrorCompletion(trigger) {
            @Override
            public void done() {
                trigger.rollback();
            }
        });
    }
}
/**
 * Pre-detach hook: tears down services bound to the nic before the nic is
 * detached, by running the release-services, release-VIP, and remove-nic flows.
 *
 * Skipped (with immediate success) when the appliance VM is Disconnected or
 * Stopped, since the agent cannot be reached in either case.
 */
@Override
protected void beforeDetachNic(VmNicInventory nicInventory, Completion completion) {
    Map data = new HashMap();
    data.put(Param.VR_NIC.toString(), nicInventory);
    data.put(Param.VR.toString(), vr);

    ApplianceVmVO appvm = Q.New(ApplianceVmVO.class)
            .eq(ApplianceVmVO_.uuid, nicInventory.getVmInstanceUuid()).find();
    if (appvm.getStatus().equals(ApplianceVmStatus.Disconnected)) {
        logger.debug(String.format("appliance vm[uuid: %s] current status is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getStatus()));
        completion.success();
        return;
    }

    if (appvm.getState().equals(VmInstanceState.Stopped)) {
        // Bug fix: log the state being tested, not the status (was getStatus()).
        logger.debug(String.format("appliance vm[uuid: %s] current state is [%s], skip before detach nic",
                appvm.getUuid(), appvm.getState()));
        completion.success();
        return;
    }

    FlowChain chain = FlowChainBuilder.newSimpleFlowChain();
    chain.setName(String.format("release-services-before-detach-nic-%s-from-virtualrouter-%s", nicInventory.getUuid(), nicInventory.getVmInstanceUuid()));
    chain.setData(data);
    chain.insert(new virtualRouterReleaseServicesbeforeDetachNicFlow());
    chain.then(new virtualRouterReleaseVipbeforeDetachNic());
    chain.then(new virtualRouterbeforeDetachNic());
    chain.done(new FlowDoneHandler(completion) {
        @Override
        public void handle(Map data) {
            completion.success();
        }
    }).error(new FlowErrorHandler(completion) {
        @Override
        public void handle(ErrorCode errCode, Map data) {
            completion.fail(errCode);
        }
    }).start();
}
}
|
[BugFix: ZSTACK-33695] move PingVirtualRouterVmMsg to a separate message queue
|
plugin/virtualRouterProvider/src/main/java/org/zstack/network/service/virtualrouter/VirtualRouter.java
|
[BugFix: ZSTACK-33695] move PingVirtualRouterVmMsg to a separate message queue
|
|
Java
|
apache-2.0
|
4b13450a9c731cf57d4775c678bf24b08faec417
| 0
|
google/mug,google/mug
|
/*****************************************************************************
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package com.google.mu.util.graph;
import static com.google.mu.util.stream.MoreStreams.whileNotNull;
import static java.util.Arrays.asList;
import static java.util.Objects.requireNonNull;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Queue;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import com.google.mu.util.stream.Iteration;
/**
* Walker for binary tree topology (see {@link Walker#inBinaryTree Walker.inBinaryTree()}).
*
* <p>Besides {@link #preOrderFrom pre-order}, {@link #postOrderFrom post-order} and {@link
* #breadthFirstFrom breadth-first} traversals, also supports {@link #inOrderFrom in-order}.
*
* @param <N> the tree node type
* @since 4.2
*/
public final class BinaryTreeWalker<N> extends Walker<N> {
  // Child accessors supplied by the caller; each returns null for "no child".
  private final UnaryOperator<N> getLeft;
  private final UnaryOperator<N> getRight;

  BinaryTreeWalker(UnaryOperator<N> getLeft, UnaryOperator<N> getRight) {
    this.getLeft = requireNonNull(getLeft);
    this.getRight = requireNonNull(getRight);
  }

  /**
   * Returns a lazy stream for breadth-first traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  public Stream<N> breadthFirstFrom(Iterable<? extends N> roots) {
    // Queue::add appends to the tail, yielding FIFO (level-by-level) order.
    return topDown(roots, Queue::add);
  }

  /**
   * Returns a lazy stream for pre-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  @Override public Stream<N> preOrderFrom(Iterable<? extends N> roots) {
    // Deque::push prepends; children are inserted right-then-left so the left
    // child is popped first — hence the swapped (getRight, getLeft) walker.
    return inBinaryTree(getRight, getLeft).topDown(roots, Deque::push);
  }

  /**
   * Returns a lazy stream for post-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   *
   * <p>For small or medium sized in-memory trees, it's equivalent and more efficient to first
   * collect the nodes into a list in "reverse post order", and then use {@code
   * Collections.reverse()}, as in:
   * <pre>{@code
   * List<Node> nodes =
   *     Walker.inBinaryTree(Tree::right, Tree::left)  // 1. flip left to right
   *         .preOrderFrom(root)                       // 2. pre-order
   *         .collect(toCollection(ArrayList::new));   // 3. in reverse-post-order
   * Collections.reverse(nodes);                       // 4. reverse to get post-order
   * }</pre>
   *
   * Or, use the {@link com.google.mu.util.stream.MoreStreams#toListAndThen toListAndThen()}
   * collector to do it in one-liner:
   * <pre>{@code
   * List<Node> nodes =
   *     Walker.inBinaryTree(Tree::right, Tree::left)
   *         .preOrderFrom(root)
   *         .collect(toListAndThen(Collections::reverse));
   * }</pre>
   */
  public Stream<N> postOrderFrom(Iterable<? extends N> roots) {
    DepthFirst iteration = new DepthFirst();
    for (N root : roots) {
      requireNonNull(root);
      // Defer each root's traversal so the stream stays lazy.
      iteration.yield(() -> iteration.postOrder(root));
    }
    return iteration.stream();
  }

  /**
   * Returns a lazy stream for in-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  @SafeVarargs public final Stream<N> inOrderFrom(N... roots) {
    return inOrderFrom(asList(roots));
  }

  /**
   * Returns a lazy stream for in-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  public Stream<N> inOrderFrom(Iterable<? extends N> roots) {
    DepthFirst iteration = new DepthFirst();
    for (N root : roots) {
      requireNonNull(root);
      iteration.yield(() -> iteration.inOrder(root));
    }
    return iteration.stream();
  }

  // Shared engine for breadth-first and pre-order: pops from the head of
  // `horizon` and inserts children using the supplied insertion order.
  private Stream<N> topDown(Iterable<? extends N> roots, InsertionOrder order) {
    Deque<N> horizon = toDeque(roots);
    return whileNotNull(horizon::poll)
        .peek(n -> {
          N left = getLeft.apply(n);
          N right = getRight.apply(n);
          if (left != null) order.insertInto(horizon, left);
          if (right != null) order.insertInto(horizon, right);
        });
  }

  // Lazily yields nodes in depth-first (in-order / post-order) sequence using
  // the Iteration continuation mechanism instead of an explicit stack.
  private final class DepthFirst extends Iteration<N> {
    // Yields left subtree, then the node, then right subtree.
    void inOrder(N root) {
      N left = getLeft.apply(root);
      N right = getRight.apply(root);
      if (left == null && right == null) { // Minimize allocation for leaf nodes.
        yield(root);
      } else {
        yield(() -> {
          if (left != null) inOrder(left);
          yield(root);
          if (right != null) inOrder(right);
        });
      }
    }

    // Yields both subtrees before the node itself.
    void postOrder(N root) {
      N left = getLeft.apply(root);
      N right = getRight.apply(root);
      if (left == null && right == null) { // Minimize allocation for leaf nodes.
        yield(root);
      } else {
        yield(() -> {
          if (left != null) postOrder(left);
          if (right != null) postOrder(right);
          yield(root);
        });
      }
    }
  }

  // Copies the roots into a mutable deque used as the traversal frontier.
  private static <N> Deque<N> toDeque(Iterable<? extends N> nodes) {
    Deque<N> deque = new ArrayDeque<>();
    for (N node : nodes) deque.add(node);
    return deque;
  }
}
|
core/src/main/java/com/google/mu/util/graph/BinaryTreeWalker.java
|
/*****************************************************************************
* ------------------------------------------------------------------------- *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*****************************************************************************/
package com.google.mu.util.graph;
import static com.google.mu.util.stream.MoreStreams.whileNotNull;
import static java.util.Arrays.asList;
import static java.util.Objects.requireNonNull;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Queue;
import java.util.function.BiConsumer;
import java.util.function.UnaryOperator;
import java.util.stream.Stream;
import com.google.mu.util.stream.Iteration;
/**
* Walker for binary tree topology (see {@link Walker#inBinaryTree Walker.inBinaryTree()}).
*
* <p>Besides {@link #preOrderFrom pre-order}, {@link #postOrderFrom post-order} and {@link
* #breadthFirstFrom breadth-first} traversals, also supports {@link #inOrderFrom in-order}.
*
* @param <N> the tree node type
* @since 4.2
*/
public final class BinaryTreeWalker<N> extends Walker<N> {
  // Child accessors supplied by the caller; each returns null for "no child".
  private final UnaryOperator<N> getLeft;
  private final UnaryOperator<N> getRight;

  BinaryTreeWalker(UnaryOperator<N> getLeft, UnaryOperator<N> getRight) {
    this.getLeft = requireNonNull(getLeft);
    this.getRight = requireNonNull(getRight);
  }

  /**
   * Returns a lazy stream for breadth-first traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  public Stream<N> breadthFirstFrom(Iterable<? extends N> roots) {
    // Queue::add appends to the tail, yielding FIFO (level-by-level) order.
    return topDown(roots, Queue::add);
  }

  /**
   * Returns a lazy stream for pre-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  @Override public Stream<N> preOrderFrom(Iterable<? extends N> roots) {
    // Deque::push prepends; children are inserted right-then-left so the left
    // child is popped first — hence the swapped (getRight, getLeft) walker.
    return inBinaryTree(getRight, getLeft).topDown(roots, Deque::push);
  }

  /**
   * Returns a lazy stream for post-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   *
   * <p>For small or medium sized in-memory trees, it's equivalent and more efficient to first
   * collect the nodes into a list in "reverse post order", and then use {@code
   * Collections.reverse()}, as in:
   * <pre>{@code
   * List<Node> nodes =
   *     Walker.inBinaryTree(Tree::right, Tree::left)  // 1. flip left to right
   *         .preOrderFrom(root)                       // 2. pre-order
   *         .collect(toCollection(ArrayList::new));   // 3. in reverse-post-order
   * Collections.reverse(nodes);                       // 4. reverse to get post-order
   * }</pre>
   *
   * Or, use the {@link com.google.mu.util.stream.MoreStreams#toListAndThen toListAndThen()}
   * collector to do it in one-liner:
   * <pre>{@code
   * List<Node> nodes =
   *     Walker.inBinaryTree(Tree::right, Tree::left)
   *         .preOrderFrom(root)
   *         .collect(toListAndThen(Collections::reverse));
   * }</pre>
   */
  public Stream<N> postOrderFrom(Iterable<? extends N> roots) {
    DepthFirst iteration = new DepthFirst();
    for (N root : roots) {
      requireNonNull(root);
      // Defer each root's traversal so the stream stays lazy.
      iteration.yield(() -> iteration.postOrder(root));
    }
    return iteration.stream();
  }

  /**
   * Returns a lazy stream for in-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  @SafeVarargs public final Stream<N> inOrderFrom(N... roots) {
    return inOrderFrom(asList(roots));
  }

  /**
   * Returns a lazy stream for in-order traversal from {@code roots}.
   * Empty stream is returned if {@code roots} is empty.
   */
  public Stream<N> inOrderFrom(Iterable<? extends N> roots) {
    DepthFirst iteration = new DepthFirst();
    for (N root : roots) {
      requireNonNull(root);
      iteration.yield(() -> iteration.inOrder(root));
    }
    return iteration.stream();
  }

  // Shared engine for breadth-first and pre-order: pops from the head of
  // `horizon` and inserts children using the supplied insertion order.
  private Stream<N> topDown(Iterable<? extends N> roots, InsertionOrder order) {
    Deque<N> horizon = toDeque(roots);
    return whileNotNull(horizon::poll)
        .peek(n -> {
          N left = getLeft.apply(n);
          N right = getRight.apply(n);
          if (left != null) order.insertInto(horizon, left);
          if (right != null) order.insertInto(horizon, right);
        });
  }

  // Lazily yields nodes in depth-first (in-order / post-order) sequence using
  // the Iteration continuation mechanism instead of an explicit stack.
  // Dead code removed: the private bottomUp(N, BiConsumer) helper had no
  // callers anywhere in this class.
  private final class DepthFirst extends Iteration<N> {
    // Yields left subtree, then the node, then right subtree.
    void inOrder(N root) {
      N left = getLeft.apply(root);
      N right = getRight.apply(root);
      if (left == null && right == null) { // Minimize allocation for leaf nodes.
        yield(root);
      } else {
        yield(() -> {
          if (left != null) inOrder(left);
          yield(root);
          if (right != null) inOrder(right);
        });
      }
    }

    // Yields both subtrees before the node itself.
    void postOrder(N root) {
      N left = getLeft.apply(root);
      N right = getRight.apply(root);
      if (left == null && right == null) { // Minimize allocation for leaf nodes.
        yield(root);
      } else {
        yield(() -> {
          if (left != null) postOrder(left);
          if (right != null) postOrder(right);
          yield(root);
        });
      }
    }
  }

  // Copies the roots into a mutable deque used as the traversal frontier.
  private static <N> Deque<N> toDeque(Iterable<? extends N> nodes) {
    Deque<N> deque = new ArrayDeque<>();
    for (N node : nodes) deque.add(node);
    return deque;
  }
}
|
Minimize allocation in BinaryTreeWalker
|
core/src/main/java/com/google/mu/util/graph/BinaryTreeWalker.java
|
Minimize allocation in BinaryTreeWalker
|
|
Java
|
apache-2.0
|
14aaf21d980e849bef9965b27f1fd362540cbeb6
| 0
|
googleinterns/step200-2020,googleinterns/step200-2020,googleinterns/step200-2020
|
package com.google.sps.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Query.SortDirection;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.common.flogger.FluentLogger;
import com.google.gson.Gson;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.annotation.WebServlet;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Servlet that returns start location and destinations user inputs */
/**
 * Servlet that stores and returns the user's start location and destination
 * list, persisted in a single "UserInputs" Datastore entity created at init().
 *
 * NOTE(review): servlet instances are shared across concurrent requests, so
 * the mutable instance fields `places` and `userKey` are not thread-safe, and
 * `places` accumulates destinations for the lifetime of the instance rather
 * than being reloaded from the entity — confirm whether this is intended.
 */
@MultipartConfig
@WebServlet("/api/destinations")
public class DestinationsServlet extends HttpServlet {
  private final UserLocations places = new UserLocations("", new ArrayList<String>());
  private final Gson gson = new Gson();
  private final DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
  private static final FluentLogger logger = FluentLogger.forEnclosingClass();
  // Key of the entity created in init(); all reads/writes go through it.
  private Key userKey;

  /** Creates an empty UserInputs entity and remembers its key. */
  @Override
  public void init() {
    Entity userEntity = new Entity("UserInputs");
    userEntity.setProperty("start", "");
    userEntity.setProperty("destinations", new ArrayList<String>());
    datastore.put(userEntity);
    userKey = userEntity.getKey();
  }

  /** Returns the stored start location and destinations as JSON. */
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    Entity entity;
    try {
      entity = datastore.get(userKey);
      String start = (String) entity.getProperty("start");
      ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
      UserLocations userLocations = new UserLocations(start, destinations);
      // Removed leftover debug System.out.println; logging goes through FluentLogger.
      response.setContentType("application/json;");
      response.getWriter().println(gson.toJson(userLocations));
    } catch (EntityNotFoundException e) {
      logger.atInfo().withCause(e).log("Unable to find UserLocations Entity %s", userKey);
    }
  }

  /** Appends the posted destination, updates the start location, and echoes the result as JSON. */
  @Override
  public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
    Entity entity;
    try {
      entity = datastore.get(userKey);
      places.addDestination(request.getParameter("destinations"));
      entity.setProperty("start", request.getParameter("start-location"));
      entity.setProperty("destinations", places.getDestinations());
      datastore.put(entity);
      String start = (String) entity.getProperty("start");
      ArrayList<String> destinations = (ArrayList) entity.getProperty("destinations");
      UserLocations userLocations = new UserLocations(start, destinations);
      response.setContentType("application/json;");
      response.getWriter().println(gson.toJson(userLocations));
    } catch (EntityNotFoundException e) {
      logger.atInfo().withCause(e).log("Unable to find UserLocations Entity %s", userKey);
    }
  }
}
|
byway/src/main/java/com/google/sps/servlets/DestinationsServlet.java
|
package com.google.sps.servlets;
import com.google.appengine.api.datastore.DatastoreService;
import com.google.appengine.api.datastore.DatastoreServiceFactory;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.api.datastore.Key;
import com.google.appengine.api.datastore.PreparedQuery;
import com.google.appengine.api.datastore.Query;
import com.google.appengine.api.datastore.Query.SortDirection;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.gson.Gson;
import java.io.IOException;
import java.util.ArrayList;
import javax.servlet.annotation.WebServlet;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/** Servlet that returns start location and destinations user inputs */
@MultipartConfig
@WebServlet("/api/destinations")
public class DestinationsServlet extends HttpServlet {
  // NOTE(review): a servlet is a shared singleton, so this field accumulates
  // destinations across ALL requests/users and is not safe under concurrent
  // POSTs — confirm whether per-request state was intended.
  private final UserLocations places = new UserLocations("", new ArrayList<String>());
  private final Gson gson = new Gson();
  private final DatastoreService datastore = DatastoreServiceFactory.getDatastoreService();
  // Key of the single "UserInputs" entity created in init(); all requests read/write it.
  private Key userKey;

  /** Creates an empty "UserInputs" entity in Datastore and remembers its key. */
  @Override
  public void init() {
    Entity userEntity = new Entity("UserInputs");
    userEntity.setProperty("start", "");
    userEntity.setProperty("destinations", new ArrayList<String>());
    datastore.put(userEntity);
    userKey = userEntity.getKey();
  }

  /** Writes the stored start location and destinations to the response as JSON. */
  @Override
  public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
      Entity entity = datastore.get(userKey);
      String start = (String) entity.getProperty("start");
      // The "destinations" property is always written as an ArrayList<String> in doPost/init.
      @SuppressWarnings("unchecked")
      ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
      UserLocations userLocations = new UserLocations(start, destinations);
      response.setContentType("application/json;");
      response.getWriter().println(gson.toJson(userLocations));
    } catch (EntityNotFoundException e) {
      // Use the servlet container's log (inherited from GenericServlet) instead of stdout.
      log("Unable to find UserLocations entity " + userKey, e);
    }
  }

  /**
   * Records the submitted destination and start location, persists them to Datastore,
   * and echoes the updated locations back as JSON.
   */
  @Override
  public void doPost(HttpServletRequest request, HttpServletResponse response) throws IOException {
    try {
      Entity entity = datastore.get(userKey);
      places.addDestination(request.getParameter("destinations"));
      entity.setProperty("start", request.getParameter("start-location"));
      entity.setProperty("destinations", places.getDestinations());
      datastore.put(entity);
      String start = (String) entity.getProperty("start");
      @SuppressWarnings("unchecked")
      ArrayList<String> destinations = (ArrayList<String>) entity.getProperty("destinations");
      UserLocations userLocations = new UserLocations(start, destinations);
      response.setContentType("application/json;");
      response.getWriter().println(gson.toJson(userLocations));
    } catch (EntityNotFoundException e) {
      // Previously swallowed silently; at minimum record why the request did nothing.
      log("Unable to find UserLocations entity " + userKey, e);
    }
  }
}
|
add logging error message
|
byway/src/main/java/com/google/sps/servlets/DestinationsServlet.java
|
add logging error message
|
|
Java
|
apache-2.0
|
311cb7783b140ef74589b6790a5fc7f09759a3bf
| 0
|
mshuler/cassandra,emolsson/cassandra,adelapena/cassandra,jasonwee/cassandra,wreda/cassandra,driftx/cassandra,ollie314/cassandra,likaiwalkman/cassandra,pthomaid/cassandra,scylladb/scylla-tools-java,michaelsembwever/cassandra,sluk3r/cassandra,jrwest/cassandra,phact/cassandra,scaledata/cassandra,kgreav/cassandra,josh-mckenzie/cassandra,pauloricardomg/cassandra,christian-esken/cassandra,sedulam/CASSANDRA-12201,tongjixianing/projects,yangzhe1991/cassandra,GabrielNicolasAvellaneda/cassandra,aarushi12002/cassandra,spodkowinski/cassandra,chaordic/cassandra,chbatey/cassandra-1,tjake/cassandra,blambov/cassandra,a-buck/cassandra,stef1927/cassandra,jasobrown/cassandra,jeromatron/cassandra,juiceblender/cassandra,aweisberg/cassandra,newrelic-forks/cassandra,bdeggleston/cassandra,xiongzheng/Cassandra-Research,michaelmior/cassandra,wreda/cassandra,yukim/cassandra,joesiewert/cassandra,RyanMagnusson/cassandra,rdio/cassandra,pauloricardomg/cassandra,JeremiahDJordan/cassandra,mkjellman/cassandra,jasonwee/cassandra,pkdevbox/cassandra,yanbit/cassandra,mambocab/cassandra,bcoverston/cassandra,instaclustr/cassandra,driftx/cassandra,pcn/cassandra-1,stef1927/cassandra,thobbs/cassandra,jasobrown/cassandra,sharvanath/cassandra,kgreav/cassandra,modempachev4/kassandra,weideng1/cassandra,WorksApplications/cassandra,dkua/cassandra,bmel/cassandra,fengshao0907/cassandra-1,spodkowinski/cassandra,emolsson/cassandra,boneill42/cassandra,bmel/cassandra,cooldoger/cassandra,nutbunnies/cassandra,bpupadhyaya/cassandra,scylladb/scylla-tools-java,clohfink/cassandra,hhorii/cassandra,vaibhi9/cassandra,yonglehou/cassandra,jasonwee/cassandra,rmarchei/cassandra,swps/cassandra,boneill42/cassandra,bdeggleston/cassandra,iamaleksey/cassandra,jeromatron/cassandra,WorksApplications/cassandra,AtwooTM/cassandra,fengshao0907/Cassandra-Research,blerer/cassandra,ejankan/cassandra,pkdevbox/cassandra,thelastpickle/cassandra,mambocab/cassandra,sedulam/CASSANDRA-12201,carlyeks/cassandra,szhou1234/cassandra,pauloricardomg/cassandra
,jeffjirsa/cassandra,bmel/cassandra,DICL/cassandra,snazy/cassandra,MasahikoSawada/cassandra,michaelsembwever/cassandra,rmarchei/cassandra,ejankan/cassandra,likaiwalkman/cassandra,codefollower/Cassandra-Research,modempachev4/kassandra,jrwest/cassandra,darach/cassandra,aureagle/cassandra,JeremiahDJordan/cassandra,weideng1/cassandra,ptuckey/cassandra,jeffjirsa/cassandra,DikangGu/cassandra,yukim/cassandra,rogerchina/cassandra,Jaumo/cassandra,bdeggleston/cassandra,hengxin/cassandra,scylladb/scylla-tools-java,pofallon/cassandra,phact/cassandra,hhorii/cassandra,modempachev4/kassandra,scaledata/cassandra,szhou1234/cassandra,likaiwalkman/cassandra,bdeggleston/cassandra,michaelmior/cassandra,codefollower/Cassandra-Research,michaelsembwever/cassandra,knifewine/cassandra,caidongyun/cassandra,yonglehou/cassandra,regispl/cassandra,jrwest/cassandra,knifewine/cassandra,yukim/cassandra,beobal/cassandra,jbellis/cassandra,driftx/cassandra,thelastpickle/cassandra,aarushi12002/cassandra,sivikt/cassandra,aweisberg/cassandra,mshuler/cassandra,apache/cassandra,tongjixianing/projects,chaordic/cassandra,jeromatron/cassandra,ben-manes/cassandra,jasonstack/cassandra,jbellis/cassandra,bcoverston/cassandra,ifesdjeen/cassandra,swps/cassandra,jasonstack/cassandra,christian-esken/cassandra,josh-mckenzie/cassandra,blambov/cassandra,WorksApplications/cassandra,scylladb/scylla-tools-java,apache/cassandra,ptuckey/cassandra,GabrielNicolasAvellaneda/cassandra,instaclustr/cassandra,pcmanus/cassandra,ollie314/cassandra,weipinghe/cassandra,yanbit/cassandra,krummas/cassandra,dongjiaqiang/cassandra,Jollyplum/cassandra,tongjixianing/projects,jkni/cassandra,hengxin/cassandra,Imran-C/cassandra,bpupadhyaya/cassandra,DICL/cassandra,darach/cassandra,nutbunnies/cassandra,cooldoger/cassandra,boneill42/cassandra,Imran-C/cassandra,ptnapoleon/cassandra,snazy/cassandra,darach/cassandra,hhorii/cassandra,kgreav/cassandra,kgreav/cassandra,weideng1/cassandra,ejankan/cassandra,iamaleksey/cassandra,tjake/cassandra,carlyeks/cas
sandra,spodkowinski/cassandra,mt0803/cassandra,newrelic-forks/cassandra,dkua/cassandra,macintoshio/cassandra,swps/cassandra,christian-esken/cassandra,guard163/cassandra,wreda/cassandra,codefollower/Cassandra-Research,beobal/cassandra,newrelic-forks/cassandra,tommystendahl/cassandra,mshuler/cassandra,bpupadhyaya/cassandra,mike-tr-adamson/cassandra,Jaumo/cassandra,ptnapoleon/cassandra,mkjellman/cassandra,beobal/cassandra,pauloricardomg/cassandra,pofallon/cassandra,michaelsembwever/cassandra,jasonstack/cassandra,chbatey/cassandra-1,fengshao0907/cassandra-1,pcmanus/cassandra,guard163/cassandra,guard163/cassandra,Instagram/cassandra,snazy/cassandra,apache/cassandra,aboudreault/cassandra,yhnishi/cassandra,strapdata/cassandra,belliottsmith/cassandra,belliottsmith/cassandra,JeremiahDJordan/cassandra,pthomaid/cassandra,blerer/cassandra,aweisberg/cassandra,yhnishi/cassandra,exoscale/cassandra,clohfink/cassandra,krummas/cassandra,apache/cassandra,mike-tr-adamson/cassandra,aarushi12002/cassandra,vramaswamy456/cassandra,vaibhi9/cassandra,cooldoger/cassandra,carlyeks/cassandra,joesiewert/cassandra,sharvanath/cassandra,pcmanus/cassandra,aboudreault/cassandra,fengshao0907/Cassandra-Research,weipinghe/cassandra,ben-manes/cassandra,a-buck/cassandra,a-buck/cassandra,strapdata/cassandra,juiceblender/cassandra,DikangGu/cassandra,mt0803/cassandra,Instagram/cassandra,regispl/cassandra,whitepages/cassandra,exoscale/cassandra,yangzhe1991/cassandra,rmarchei/cassandra,vramaswamy456/cassandra,jkni/cassandra,Jaumo/cassandra,xiongzheng/Cassandra-Research,jkni/cassandra,MasahikoSawada/cassandra,DICL/cassandra,rdio/cassandra,jeffjirsa/cassandra,iamaleksey/cassandra,GabrielNicolasAvellaneda/cassandra,strapdata/cassandra,RyanMagnusson/cassandra,sluk3r/cassandra,fengshao0907/Cassandra-Research,josh-mckenzie/cassandra,krummas/cassandra,pofallon/cassandra,jrwest/cassandra,jasobrown/cassandra,Instagram/cassandra,rdio/cassandra,tommystendahl/cassandra,whitepages/cassandra,yangzhe1991/cassandra,beobal/cas
sandra,vramaswamy456/cassandra,stef1927/cassandra,ifesdjeen/cassandra,thelastpickle/cassandra,ben-manes/cassandra,regispl/cassandra,spodkowinski/cassandra,aureagle/cassandra,driftx/cassandra,mheffner/cassandra-1,cooldoger/cassandra,yhnishi/cassandra,instaclustr/cassandra,yonglehou/cassandra,belliottsmith/cassandra,thobbs/cassandra,bcoverston/cassandra,mheffner/cassandra-1,joesiewert/cassandra,tommystendahl/cassandra,szhou1234/cassandra,helena/cassandra,sharvanath/cassandra,Imran-C/cassandra,vaibhi9/cassandra,thelastpickle/cassandra,adelapena/cassandra,jasobrown/cassandra,caidongyun/cassandra,MasahikoSawada/cassandra,adelapena/cassandra,yanbit/cassandra,dongjiaqiang/cassandra,mshuler/cassandra,scaledata/cassandra,jeffjirsa/cassandra,caidongyun/cassandra,juiceblender/cassandra,nitsanw/cassandra,ptnapoleon/cassandra,Jollyplum/cassandra,phact/cassandra,blerer/cassandra,helena/cassandra,bcoverston/cassandra,mkjellman/cassandra,weipinghe/cassandra,mike-tr-adamson/cassandra,xiongzheng/Cassandra-Research,clohfink/cassandra,instaclustr/cassandra,fengshao0907/cassandra-1,aboudreault/cassandra,ollie314/cassandra,pcn/cassandra-1,blambov/cassandra,tommystendahl/cassandra,juiceblender/cassandra,hengxin/cassandra,mt0803/cassandra,DikangGu/cassandra,szhou1234/cassandra,mkjellman/cassandra,mambocab/cassandra,sedulam/CASSANDRA-12201,nitsanw/cassandra,strapdata/cassandra,thobbs/cassandra,belliottsmith/cassandra,AtwooTM/cassandra,aureagle/cassandra,krummas/cassandra,Instagram/cassandra,stef1927/cassandra,adelapena/cassandra,mike-tr-adamson/cassandra,chaordic/cassandra,ptuckey/cassandra,snazy/cassandra,knifewine/cassandra,macintoshio/cassandra,nitsanw/cassandra,aweisberg/cassandra,AtwooTM/cassandra,helena/cassandra,michaelmior/cassandra,nutbunnies/cassandra,sluk3r/cassandra,ifesdjeen/cassandra,pthomaid/cassandra,RyanMagnusson/cassandra,yukim/cassandra,macintoshio/cassandra,adejanovski/cassandra,pcn/cassandra-1,rogerchina/cassandra,dkua/cassandra,sivikt/cassandra,clohfink/cassandra,iamal
eksey/cassandra,mheffner/cassandra-1,WorksApplications/cassandra,ifesdjeen/cassandra,adejanovski/cassandra,rogerchina/cassandra,josh-mckenzie/cassandra,emolsson/cassandra,blerer/cassandra,tjake/cassandra,pkdevbox/cassandra,exoscale/cassandra,jbellis/cassandra,Jollyplum/cassandra,dongjiaqiang/cassandra,adejanovski/cassandra,sivikt/cassandra,whitepages/cassandra,chbatey/cassandra-1,tjake/cassandra,blambov/cassandra
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.helpers.MessageFormatter;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.composites.Composite;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.ClientWarn;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.transport.messages.ResultMessage;
/**
* A <code>BATCH</code> statement parsed from a CQL query.
*
*/
public class BatchStatement implements CQLStatement
{
// Batch flavor: LOGGED uses the batchlog for atomicity, UNLOGGED does not,
// COUNTER is for counter mutations only (enforced in validate()).
public static enum Type
{
LOGGED, UNLOGGED, COUNTER
}
// Number of bound markers (?) across all statements in the batch.
private final int boundTerms;
public final Type type;
// The individual INSERT/UPDATE/DELETE statements making up this batch.
private final List<ModificationStatement> statements;
// Batch-level attributes (timestamp, TTL); per-statement attributes live on each statement.
private final Attributes attrs;
// True iff any nested statement has an IF condition (computed once in the constructor).
private final boolean hasConditions;
private static final Logger logger = LoggerFactory.getLogger(BatchStatement.class);
/**
* Creates a new BatchStatement from a list of statements and a
* Thrift consistency level.
*
* @param type type of the batch
* @param statements a list of UpdateStatements
* @param attrs additional attributes for statement (CL, timestamp, timeToLive)
*/
public BatchStatement(int boundTerms, Type type, List<ModificationStatement> statements, Attributes attrs)
{
boolean hasConditions = false;
for (ModificationStatement statement : statements)
hasConditions |= statement.hasConditions();
this.boundTerms = boundTerms;
this.type = type;
this.statements = statements;
this.attrs = attrs;
this.hasConditions = hasConditions;
}
// Lazily concatenates the functions used by the batch attributes and every nested statement.
public Iterable<org.apache.cassandra.cql3.functions.Function> getFunctions()
{
Iterable<org.apache.cassandra.cql3.functions.Function> functions = attrs.getFunctions();
for (ModificationStatement statement : statements)
functions = Iterables.concat(functions, statement.getFunctions());
return functions;
}
public int getBoundTerms()
{
return boundTerms;
}
// Delegates permission checks to each nested statement.
public void checkAccess(ClientState state) throws InvalidRequestException, UnauthorizedException
{
for (ModificationStatement statement : statements)
statement.checkAccess(state);
}
// Validates a prepared batch statement without validating its nested statements.
public void validate() throws InvalidRequestException
{
if (attrs.isTimeToLiveSet())
throw new InvalidRequestException("Global TTL on the BATCH statement is not supported.")
;
boolean timestampSet = attrs.isTimestampSet();
if (timestampSet)
{
if (hasConditions)
throw new InvalidRequestException("Cannot provide custom timestamp for conditional BATCH");
if (type == Type.COUNTER)
throw new InvalidRequestException("Cannot provide custom timestamp for counter BATCH");
}
// Counter and regular mutations cannot be mixed; track which kinds we see.
boolean hasCounters = false;
boolean hasNonCounters = false;
for (ModificationStatement statement : statements)
{
if (timestampSet && statement.isCounter())
throw new InvalidRequestException("Cannot provide custom timestamp for a BATCH containing counters");
if (timestampSet && statement.isTimestampSet())
throw new InvalidRequestException("Timestamp must be set either on BATCH or individual statements");
if (type == Type.COUNTER && !statement.isCounter())
throw new InvalidRequestException("Cannot include non-counter statement in a counter batch");
if (type == Type.LOGGED && statement.isCounter())
throw new InvalidRequestException("Cannot include a counter statement in a logged batch");
if (statement.isCounter())
hasCounters = true;
else
hasNonCounters = true;
}
if (hasCounters && hasNonCounters)
throw new InvalidRequestException("Counter and non-counter mutations cannot exist in the same batch");
if (hasConditions)
{
// Conditional (CAS) batches must target a single table: verify all statements agree.
String ksName = null;
String cfName = null;
for (ModificationStatement stmt : statements)
{
if (ksName != null && (!stmt.keyspace().equals(ksName) || !stmt.columnFamily().equals(cfName)))
throw new InvalidRequestException("Batch with conditions cannot span multiple tables");
ksName = stmt.keyspace();
cfName = stmt.columnFamily();
}
}
}
// The batch itself will be validated in either Parsed#prepare() - for regular CQL3 batches,
// or in QueryProcessor.processBatch() - for native protocol batches.
public void validate(ClientState state) throws InvalidRequestException
{
for (ModificationStatement statement : statements)
statement.validate(state);
}
public List<ModificationStatement> getStatements()
{
return statements;
}
// Builds all mutations for the batch, grouped/merged per keyspace and partition key.
private Collection<? extends IMutation> getMutations(BatchQueryOptions options, boolean local, long now)
throws RequestExecutionException, RequestValidationException
{
Map<String, Map<ByteBuffer, IMutation>> mutations = new HashMap<>();
for (int i = 0; i < statements.size(); i++)
{
ModificationStatement statement = statements.get(i);
QueryOptions statementOptions = options.forStatement(i);
long timestamp = attrs.getTimestamp(now, statementOptions);
addStatementMutations(statement, statementOptions, local, timestamp, mutations);
}
return unzipMutations(mutations);
}
// Flattens the keyspace -> (key -> mutation) map into a single mutation collection.
private Collection<? extends IMutation> unzipMutations(Map<String, Map<ByteBuffer, IMutation>> mutations)
{
// The case where all statement where on the same keyspace is pretty common
if (mutations.size() == 1)
return mutations.values().iterator().next().values();
List<IMutation> ms = new ArrayList<>();
for (Map<ByteBuffer, IMutation> ksMap : mutations.values())
ms.addAll(ksMap.values());
return ms;
}
// Adds one statement's updates to the shared mutations map, merging into any
// existing mutation for the same keyspace/partition key.
private void addStatementMutations(ModificationStatement statement,
QueryOptions options,
boolean local,
long now,
Map<String, Map<ByteBuffer, IMutation>> mutations)
throws RequestExecutionException, RequestValidationException
{
String ksName = statement.keyspace();
Map<ByteBuffer, IMutation> ksMap = mutations.get(ksName);
if (ksMap == null)
{
ksMap = new HashMap<>();
mutations.put(ksName, ksMap);
}
// The following does the same than statement.getMutations(), but we inline it here because
// we don't want to recreate mutations every time as this is particularly inefficient when applying
// multiple batch to the same partition (see #6737).
List<ByteBuffer> keys = statement.buildPartitionKeyNames(options);
Composite clusteringPrefix = statement.createClusteringPrefix(options);
UpdateParameters params = statement.makeUpdateParameters(keys, clusteringPrefix, options, local, now);
for (ByteBuffer key : keys)
{
IMutation mutation = ksMap.get(key);
Mutation mut;
if (mutation == null)
{
// First update for this key: create a new (possibly counter-wrapped) mutation.
mut = new Mutation(ksName, key);
mutation = statement.cfm.isCounter() ? new CounterMutation(mut, options.getConsistency()) : mut;
ksMap.put(key, mutation);
}
else
{
// Reuse the existing mutation, unwrapping the counter wrapper if present.
mut = statement.cfm.isCounter() ? ((CounterMutation)mutation).getMutation() : (Mutation)mutation;
}
statement.addUpdateForKey(mut.addOrGet(statement.cfm), key, clusteringPrefix, params);
}
}
/**
* Checks batch size to ensure threshold is met. If not, a warning is logged.
* @param cfs ColumnFamilies that will store the batch's mutations.
*/
public static void verifyBatchSize(Iterable<ColumnFamily> cfs) throws InvalidRequestException
{
long size = 0;
long warnThreshold = DatabaseDescriptor.getBatchSizeWarnThreshold();
long failThreshold = DatabaseDescriptor.getBatchSizeFailThreshold();
for (ColumnFamily cf : cfs)
size += cf.dataSize();
if (size > warnThreshold)
{
// Collect the distinct keyspace.table names involved, for the log message.
Set<String> ksCfPairs = new HashSet<>();
for (ColumnFamily cf : cfs)
ksCfPairs.add(cf.metadata().ksName + "." + cf.metadata().cfName);
String format = "Batch of prepared statements for {} is of size {}, exceeding specified threshold of {} by {}.{}";
if (size > failThreshold)
{
// Over the hard limit: trace + error-log, then reject the batch.
Tracing.trace(format, ksCfPairs, size, failThreshold, size - failThreshold, " (see batch_size_fail_threshold_in_kb)");
logger.error(format, ksCfPairs, size, failThreshold, size - failThreshold, " (see batch_size_fail_threshold_in_kb)");
throw new InvalidRequestException("Batch too large");
}
else if (logger.isWarnEnabled())
{
logger.warn(format, ksCfPairs, size, warnThreshold, size - warnThreshold, "");
}
// Also surface the warning to the client via the ClientWarn mechanism.
ClientWarn.warn(MessageFormatter.arrayFormat(format, new Object[] {ksCfPairs, size, warnThreshold, size - warnThreshold, ""}).getMessage());
}
}
public ResultMessage execute(QueryState queryState, QueryOptions options) throws RequestExecutionException, RequestValidationException
{
return execute(queryState, BatchQueryOptions.withoutPerStatementVariables(options));
}
public ResultMessage execute(QueryState queryState, BatchQueryOptions options) throws RequestExecutionException, RequestValidationException
{
return execute(queryState, options, false, options.getTimestamp(queryState));
}
// Core execute path: routes to the CAS path when any statement has conditions,
// otherwise applies all mutations at the requested consistency.
private ResultMessage execute(QueryState queryState, BatchQueryOptions options, boolean local, long now)
throws RequestExecutionException, RequestValidationException
{
if (options.getConsistency() == null)
throw new InvalidRequestException("Invalid empty consistency level");
if (options.getSerialConsistency() == null)
throw new InvalidRequestException("Invalid empty serial consistency level");
if (hasConditions)
return executeWithConditions(options, queryState);
executeWithoutConditions(getMutations(options, local, now), options.getConsistency());
return new ResultMessage.Void();
}
private void executeWithoutConditions(Collection<? extends IMutation> mutations, ConsistencyLevel cl) throws RequestExecutionException, RequestValidationException
{
// Extract each collection of cfs from it's IMutation and then lazily concatenate all of them into a single Iterable.
Iterable<ColumnFamily> cfs = Iterables.concat(Iterables.transform(mutations, new Function<IMutation, Collection<ColumnFamily>>()
{
public Collection<ColumnFamily> apply(IMutation im)
{
return im.getColumnFamilies();
}
}));
verifyBatchSize(cfs);
// Only LOGGED batches with more than one mutation need batchlog atomicity.
boolean mutateAtomic = (type == Type.LOGGED && mutations.size() > 1);
StorageProxy.mutateWithTriggers(mutations, cl, mutateAtomic);
}
// Executes a conditional (CAS) batch: all statements must target the same
// partition; their conditions and row updates are folded into one CQL3CasRequest.
private ResultMessage executeWithConditions(BatchQueryOptions options, QueryState state)
throws RequestExecutionException, RequestValidationException
{
long now = state.getTimestamp();
ByteBuffer key = null;
String ksName = null;
String cfName = null;
CQL3CasRequest casRequest = null;
Set<ColumnDefinition> columnsWithConditions = new LinkedHashSet<>();
for (int i = 0; i < statements.size(); i++)
{
ModificationStatement statement = statements.get(i);
QueryOptions statementOptions = options.forStatement(i);
long timestamp = attrs.getTimestamp(now, statementOptions);
List<ByteBuffer> pks = statement.buildPartitionKeyNames(statementOptions);
if (pks.size() > 1)
throw new IllegalArgumentException("Batch with conditions cannot span multiple partitions (you cannot use IN on the partition key)");
if (key == null)
{
// First statement fixes the partition; subsequent statements must match it.
key = pks.get(0);
ksName = statement.cfm.ksName;
cfName = statement.cfm.cfName;
casRequest = new CQL3CasRequest(statement.cfm, key, true);
}
else if (!key.equals(pks.get(0)))
{
throw new InvalidRequestException("Batch with conditions cannot span multiple partitions");
}
Composite clusteringPrefix = statement.createClusteringPrefix(statementOptions);
if (statement.hasConditions())
{
statement.addConditions(clusteringPrefix, casRequest, statementOptions);
// As soon as we have a ifNotExists, we set columnsWithConditions to null so that everything is in the resultSet
if (statement.hasIfNotExistCondition() || statement.hasIfExistCondition())
columnsWithConditions = null;
else if (columnsWithConditions != null)
Iterables.addAll(columnsWithConditions, statement.getColumnsWithConditions());
}
casRequest.addRowUpdate(clusteringPrefix, statement, statementOptions, timestamp);
}
ColumnFamily result = StorageProxy.cas(ksName, cfName, key, casRequest, options.getSerialConsistency(), options.getConsistency(), state.getClientState());
return new ResultMessage.Rows(ModificationStatement.buildCasResultSet(ksName, key, cfName, result, columnsWithConditions, true, options.forStatement(0)));
}
// Applies the batch locally (no coordinator round-trip); conditional batches are not supported here.
public ResultMessage executeInternal(QueryState queryState, QueryOptions options) throws RequestValidationException, RequestExecutionException
{
assert !hasConditions;
for (IMutation mutation : getMutations(BatchQueryOptions.withoutPerStatementVariables(options), true, queryState.getTimestamp()))
{
// We don't use counters internally.
assert mutation instanceof Mutation;
((Mutation) mutation).apply();
}
return null;
}
// Supplier of per-statement bound variables for a batch.
public interface BatchVariables
{
public List<ByteBuffer> getVariablesForStatement(int statementInBatch);
}
public String toString()
{
return String.format("BatchStatement(type=%s, statements=%s)", type, statements);
}
// Raw parsed form of a BATCH statement, before preparation.
public static class Parsed extends CFStatement
{
private final Type type;
private final Attributes.Raw attrs;
private final List<ModificationStatement.Parsed> parsedStatements;
public Parsed(Type type, Attributes.Raw attrs, List<ModificationStatement.Parsed> parsedStatements)
{
super(null);
this.type = type;
this.attrs = attrs;
this.parsedStatements = parsedStatements;
}
@Override
public void prepareKeyspace(ClientState state) throws InvalidRequestException
{
for (ModificationStatement.Parsed statement : parsedStatements)
statement.prepareKeyspace(state);
}
// Prepares every nested statement, validates the resulting batch, and computes
// partition-key bind indexes when all statements target the same table.
public ParsedStatement.Prepared prepare() throws InvalidRequestException
{
VariableSpecifications boundNames = getBoundVariables();
String firstKS = null;
String firstCF = null;
boolean haveMultipleCFs = false;
List<ModificationStatement> statements = new ArrayList<>(parsedStatements.size());
for (ModificationStatement.Parsed parsed : parsedStatements)
{
if (firstKS == null)
{
firstKS = parsed.keyspace();
firstCF = parsed.columnFamily();
}
else if (!haveMultipleCFs)
{
haveMultipleCFs = !firstKS.equals(parsed.keyspace()) || !firstCF.equals(parsed.columnFamily());
}
statements.add(parsed.prepare(boundNames));
}
Attributes prepAttrs = attrs.prepare("[batch]", "[batch]");
prepAttrs.collectMarkerSpecification(boundNames);
BatchStatement batchStatement = new BatchStatement(boundNames.size(), type, statements, prepAttrs);
batchStatement.validate();
// Use the CFMetadata of the first statement for partition key bind indexes. If the statements affect
// multiple tables, we won't send partition key bind indexes.
Short[] partitionKeyBindIndexes = haveMultipleCFs ? null
: boundNames.getPartitionKeyBindIndexes(batchStatement.statements.get(0).cfm);
return new ParsedStatement.Prepared(batchStatement, boundNames, partitionKeyBindIndexes);
}
}
}
|
src/java/org/apache/cassandra/cql3/statements/BatchStatement.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.*;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.composites.Composite;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.ClientWarn;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.service.StorageProxy;
import org.apache.cassandra.tracing.Tracing;
import org.apache.cassandra.transport.messages.ResultMessage;
/**
* A <code>BATCH</code> statement parsed from a CQL query.
*
*/
public class BatchStatement implements CQLStatement
{
    public static enum Type
    {
        LOGGED, UNLOGGED, COUNTER
    }

    private final int boundTerms;
    public final Type type;
    private final List<ModificationStatement> statements;
    private final Attributes attrs;
    private final boolean hasConditions;
    private static final Logger logger = LoggerFactory.getLogger(BatchStatement.class);

    /**
     * Creates a new BatchStatement from a list of statements and a
     * Thrift consistency level.
     *
     * @param boundTerms number of bind markers in the whole batch
     * @param type type of the batch
     * @param statements a list of UpdateStatements
     * @param attrs additional attributes for statement (CL, timestamp, timeToLive)
     */
    public BatchStatement(int boundTerms, Type type, List<ModificationStatement> statements, Attributes attrs)
    {
        boolean hasConditions = false;
        for (ModificationStatement statement : statements)
            hasConditions |= statement.hasConditions();

        this.boundTerms = boundTerms;
        this.type = type;
        this.statements = statements;
        this.attrs = attrs;
        this.hasConditions = hasConditions;
    }

    public Iterable<org.apache.cassandra.cql3.functions.Function> getFunctions()
    {
        Iterable<org.apache.cassandra.cql3.functions.Function> functions = attrs.getFunctions();
        for (ModificationStatement statement : statements)
            functions = Iterables.concat(functions, statement.getFunctions());
        return functions;
    }

    public int getBoundTerms()
    {
        return boundTerms;
    }

    public void checkAccess(ClientState state) throws InvalidRequestException, UnauthorizedException
    {
        for (ModificationStatement statement : statements)
            statement.checkAccess(state);
    }

    // Validates a prepared batch statement without validating its nested statements.
    public void validate() throws InvalidRequestException
    {
        if (attrs.isTimeToLiveSet())
            throw new InvalidRequestException("Global TTL on the BATCH statement is not supported.");

        boolean timestampSet = attrs.isTimestampSet();
        if (timestampSet)
        {
            if (hasConditions)
                throw new InvalidRequestException("Cannot provide custom timestamp for conditional BATCH");

            if (type == Type.COUNTER)
                throw new InvalidRequestException("Cannot provide custom timestamp for counter BATCH");
        }

        boolean hasCounters = false;
        boolean hasNonCounters = false;

        for (ModificationStatement statement : statements)
        {
            if (timestampSet && statement.isCounter())
                throw new InvalidRequestException("Cannot provide custom timestamp for a BATCH containing counters");

            if (timestampSet && statement.isTimestampSet())
                throw new InvalidRequestException("Timestamp must be set either on BATCH or individual statements");

            if (type == Type.COUNTER && !statement.isCounter())
                throw new InvalidRequestException("Cannot include non-counter statement in a counter batch");

            if (type == Type.LOGGED && statement.isCounter())
                throw new InvalidRequestException("Cannot include a counter statement in a logged batch");

            if (statement.isCounter())
                hasCounters = true;
            else
                hasNonCounters = true;
        }

        if (hasCounters && hasNonCounters)
            throw new InvalidRequestException("Counter and non-counter mutations cannot exist in the same batch");

        if (hasConditions)
        {
            String ksName = null;
            String cfName = null;
            for (ModificationStatement stmt : statements)
            {
                if (ksName != null && (!stmt.keyspace().equals(ksName) || !stmt.columnFamily().equals(cfName)))
                    throw new InvalidRequestException("Batch with conditions cannot span multiple tables");
                ksName = stmt.keyspace();
                cfName = stmt.columnFamily();
            }
        }
    }

    // The batch itself will be validated in either Parsed#prepare() - for regular CQL3 batches,
    // or in QueryProcessor.processBatch() - for native protocol batches.
    public void validate(ClientState state) throws InvalidRequestException
    {
        for (ModificationStatement statement : statements)
            statement.validate(state);
    }

    public List<ModificationStatement> getStatements()
    {
        return statements;
    }

    private Collection<? extends IMutation> getMutations(BatchQueryOptions options, boolean local, long now)
    throws RequestExecutionException, RequestValidationException
    {
        Map<String, Map<ByteBuffer, IMutation>> mutations = new HashMap<>();
        for (int i = 0; i < statements.size(); i++)
        {
            ModificationStatement statement = statements.get(i);
            QueryOptions statementOptions = options.forStatement(i);
            long timestamp = attrs.getTimestamp(now, statementOptions);
            addStatementMutations(statement, statementOptions, local, timestamp, mutations);
        }
        return unzipMutations(mutations);
    }

    private Collection<? extends IMutation> unzipMutations(Map<String, Map<ByteBuffer, IMutation>> mutations)
    {
        // The case where all statements were on the same keyspace is pretty common
        if (mutations.size() == 1)
            return mutations.values().iterator().next().values();

        List<IMutation> ms = new ArrayList<>();
        for (Map<ByteBuffer, IMutation> ksMap : mutations.values())
            ms.addAll(ksMap.values());
        return ms;
    }

    private void addStatementMutations(ModificationStatement statement,
                                       QueryOptions options,
                                       boolean local,
                                       long now,
                                       Map<String, Map<ByteBuffer, IMutation>> mutations)
    throws RequestExecutionException, RequestValidationException
    {
        String ksName = statement.keyspace();
        Map<ByteBuffer, IMutation> ksMap = mutations.get(ksName);
        if (ksMap == null)
        {
            ksMap = new HashMap<>();
            mutations.put(ksName, ksMap);
        }

        // The following does the same than statement.getMutations(), but we inline it here because
        // we don't want to recreate mutations every time as this is particularly inefficient when applying
        // multiple batch to the same partition (see #6737).
        List<ByteBuffer> keys = statement.buildPartitionKeyNames(options);
        Composite clusteringPrefix = statement.createClusteringPrefix(options);
        UpdateParameters params = statement.makeUpdateParameters(keys, clusteringPrefix, options, local, now);

        for (ByteBuffer key : keys)
        {
            IMutation mutation = ksMap.get(key);
            Mutation mut;
            if (mutation == null)
            {
                mut = new Mutation(ksName, key);
                mutation = statement.cfm.isCounter() ? new CounterMutation(mut, options.getConsistency()) : mut;
                ksMap.put(key, mutation);
            }
            else
            {
                mut = statement.cfm.isCounter() ? ((CounterMutation)mutation).getMutation() : (Mutation)mutation;
            }
            statement.addUpdateForKey(mut.addOrGet(statement.cfm), key, clusteringPrefix, params);
        }
    }

    /**
     * Checks batch size to ensure threshold is met. If not, a warning is logged.
     * @param cfs ColumnFamilies that will store the batch's mutations.
     * @throws InvalidRequestException when the batch exceeds the hard fail threshold.
     */
    public static void verifyBatchSize(Iterable<ColumnFamily> cfs) throws InvalidRequestException
    {
        long size = 0;
        long warnThreshold = DatabaseDescriptor.getBatchSizeWarnThreshold();
        long failThreshold = DatabaseDescriptor.getBatchSizeFailThreshold();

        for (ColumnFamily cf : cfs)
            size += cf.dataSize();

        if (size > warnThreshold)
        {
            Set<String> ksCfPairs = new HashSet<>();
            for (ColumnFamily cf : cfs)
                ksCfPairs.add(cf.metadata().ksName + "." + cf.metadata().cfName);

            String format = "Batch of prepared statements for {} is of size {}, exceeding specified threshold of {} by {}.{}";
            if (size > failThreshold)
            {
                Tracing.trace(format, ksCfPairs, size, failThreshold, size - failThreshold, " (see batch_size_fail_threshold_in_kb)");
                logger.error(format, ksCfPairs, size, failThreshold, size - failThreshold, " (see batch_size_fail_threshold_in_kb)");
                throw new InvalidRequestException("Batch too large");
            }
            else if (logger.isWarnEnabled())
            {
                logger.warn(format, ksCfPairs, size, warnThreshold, size - warnThreshold, "");
            }
            // BUG FIX: String.format() does not substitute SLF4J-style '{}' markers, so the client
            // warning previously contained the raw placeholder text. Build the client-facing
            // message with '%'-style conversions instead.
            ClientWarn.warn(String.format("Batch of prepared statements for %s is of size %d, exceeding specified threshold of %d by %d.",
                                          ksCfPairs, size, warnThreshold, size - warnThreshold));
        }
    }

    public ResultMessage execute(QueryState queryState, QueryOptions options) throws RequestExecutionException, RequestValidationException
    {
        return execute(queryState, BatchQueryOptions.withoutPerStatementVariables(options));
    }

    public ResultMessage execute(QueryState queryState, BatchQueryOptions options) throws RequestExecutionException, RequestValidationException
    {
        return execute(queryState, options, false, options.getTimestamp(queryState));
    }

    private ResultMessage execute(QueryState queryState, BatchQueryOptions options, boolean local, long now)
    throws RequestExecutionException, RequestValidationException
    {
        if (options.getConsistency() == null)
            throw new InvalidRequestException("Invalid empty consistency level");
        if (options.getSerialConsistency() == null)
            throw new InvalidRequestException("Invalid empty serial consistency level");

        if (hasConditions)
            return executeWithConditions(options, queryState);

        executeWithoutConditions(getMutations(options, local, now), options.getConsistency());
        return new ResultMessage.Void();
    }

    private void executeWithoutConditions(Collection<? extends IMutation> mutations, ConsistencyLevel cl) throws RequestExecutionException, RequestValidationException
    {
        // Extract each collection of cfs from it's IMutation and then lazily concatenate all of them into a single Iterable.
        Iterable<ColumnFamily> cfs = Iterables.concat(Iterables.transform(mutations, new Function<IMutation, Collection<ColumnFamily>>()
        {
            public Collection<ColumnFamily> apply(IMutation im)
            {
                return im.getColumnFamilies();
            }
        }));
        verifyBatchSize(cfs);

        boolean mutateAtomic = (type == Type.LOGGED && mutations.size() > 1);
        StorageProxy.mutateWithTriggers(mutations, cl, mutateAtomic);
    }

    private ResultMessage executeWithConditions(BatchQueryOptions options, QueryState state)
    throws RequestExecutionException, RequestValidationException
    {
        long now = state.getTimestamp();
        ByteBuffer key = null;
        String ksName = null;
        String cfName = null;
        CQL3CasRequest casRequest = null;
        Set<ColumnDefinition> columnsWithConditions = new LinkedHashSet<>();

        for (int i = 0; i < statements.size(); i++)
        {
            ModificationStatement statement = statements.get(i);
            QueryOptions statementOptions = options.forStatement(i);
            long timestamp = attrs.getTimestamp(now, statementOptions);
            List<ByteBuffer> pks = statement.buildPartitionKeyNames(statementOptions);
            if (pks.size() > 1)
                throw new IllegalArgumentException("Batch with conditions cannot span multiple partitions (you cannot use IN on the partition key)");
            if (key == null)
            {
                key = pks.get(0);
                ksName = statement.cfm.ksName;
                cfName = statement.cfm.cfName;
                casRequest = new CQL3CasRequest(statement.cfm, key, true);
            }
            else if (!key.equals(pks.get(0)))
            {
                throw new InvalidRequestException("Batch with conditions cannot span multiple partitions");
            }

            Composite clusteringPrefix = statement.createClusteringPrefix(statementOptions);
            if (statement.hasConditions())
            {
                statement.addConditions(clusteringPrefix, casRequest, statementOptions);
                // As soon as we have a ifNotExists, we set columnsWithConditions to null so that everything is in the resultSet
                if (statement.hasIfNotExistCondition() || statement.hasIfExistCondition())
                    columnsWithConditions = null;
                else if (columnsWithConditions != null)
                    Iterables.addAll(columnsWithConditions, statement.getColumnsWithConditions());
            }
            casRequest.addRowUpdate(clusteringPrefix, statement, statementOptions, timestamp);
        }

        ColumnFamily result = StorageProxy.cas(ksName, cfName, key, casRequest, options.getSerialConsistency(), options.getConsistency(), state.getClientState());

        return new ResultMessage.Rows(ModificationStatement.buildCasResultSet(ksName, key, cfName, result, columnsWithConditions, true, options.forStatement(0)));
    }

    public ResultMessage executeInternal(QueryState queryState, QueryOptions options) throws RequestValidationException, RequestExecutionException
    {
        assert !hasConditions;
        for (IMutation mutation : getMutations(BatchQueryOptions.withoutPerStatementVariables(options), true, queryState.getTimestamp()))
        {
            // We don't use counters internally.
            assert mutation instanceof Mutation;
            ((Mutation) mutation).apply();
        }
        return null;
    }

    public interface BatchVariables
    {
        public List<ByteBuffer> getVariablesForStatement(int statementInBatch);
    }

    public String toString()
    {
        return String.format("BatchStatement(type=%s, statements=%s)", type, statements);
    }

    public static class Parsed extends CFStatement
    {
        private final Type type;
        private final Attributes.Raw attrs;
        private final List<ModificationStatement.Parsed> parsedStatements;

        public Parsed(Type type, Attributes.Raw attrs, List<ModificationStatement.Parsed> parsedStatements)
        {
            super(null);
            this.type = type;
            this.attrs = attrs;
            this.parsedStatements = parsedStatements;
        }

        @Override
        public void prepareKeyspace(ClientState state) throws InvalidRequestException
        {
            for (ModificationStatement.Parsed statement : parsedStatements)
                statement.prepareKeyspace(state);
        }

        public ParsedStatement.Prepared prepare() throws InvalidRequestException
        {
            VariableSpecifications boundNames = getBoundVariables();

            String firstKS = null;
            String firstCF = null;
            boolean haveMultipleCFs = false;

            List<ModificationStatement> statements = new ArrayList<>(parsedStatements.size());
            for (ModificationStatement.Parsed parsed : parsedStatements)
            {
                if (firstKS == null)
                {
                    firstKS = parsed.keyspace();
                    firstCF = parsed.columnFamily();
                }
                else if (!haveMultipleCFs)
                {
                    haveMultipleCFs = !firstKS.equals(parsed.keyspace()) || !firstCF.equals(parsed.columnFamily());
                }

                statements.add(parsed.prepare(boundNames));
            }

            Attributes prepAttrs = attrs.prepare("[batch]", "[batch]");
            prepAttrs.collectMarkerSpecification(boundNames);

            BatchStatement batchStatement = new BatchStatement(boundNames.size(), type, statements, prepAttrs);
            batchStatement.validate();

            // Use the CFMetadata of the first statement for partition key bind indexes. If the statements affect
            // multiple tables, we won't send partition key bind indexes.
            Short[] partitionKeyBindIndexes = haveMultipleCFs ? null
                                                             : boundNames.getPartitionKeyBindIndexes(batchStatement.statements.get(0).cfm);

            return new ParsedStatement.Prepared(batchStatement, boundNames, partitionKeyBindIndexes);
        }
    }
}
|
use slf4j string formatter for strings with {} markers
|
src/java/org/apache/cassandra/cql3/statements/BatchStatement.java
|
use slf4j string formatter for strings with {} markers
|
|
Java
|
apache-2.0
|
4b5bea8661786ae7b2e9fdb2eb2e4a28371c1270
| 0
|
arjleon/android-argazki
|
package elor.leon.android.argazki;
import android.content.Context;
import android.widget.ImageView;
/**
 * A fluent builder describing a single image-fetch request: the source URL,
 * optional down-scaling, and either a target ImageView or a callback that will
 * receive the Bitmap. Call {@link #go()} to hand the request to the manager.
 */
public class ArgazkiRequest {

    private Context mContext;
    private String mUrl;
    private boolean mScale = false;
    private int mMaxDimension;
    private ImageView mImageView;
    private ArgazkiListener mCustomListener;

    ArgazkiRequest(Context context) {
        // Hold the application context only, never an Activity, to avoid leaks.
        mContext = context.getApplicationContext();
    }

    // --- package-private accessors used by the manager -----------------------

    Context getContext() {
        return mContext;
    }

    String getUrl() {
        return mUrl;
    }

    boolean shouldScale() {
        return mScale;
    }

    int getMaxDimension() {
        return mMaxDimension;
    }

    boolean hasImageView() {
        return mImageView != null;
    }

    ImageView getImageView() {
        return mImageView;
    }

    boolean hasListener() {
        return mCustomListener != null;
    }

    ArgazkiListener getListener() {
        return mCustomListener;
    }

    // --- public fluent API ---------------------------------------------------

    /** Sets the URL the image will be fetched from. */
    public ArgazkiRequest from(final String url) {
        mUrl = url;
        return this;
    }

    /** Requests that the fetched image be scaled down to at most maxDimension. */
    public ArgazkiRequest scaleTo(final int maxDimension) {
        mScale = true;
        mMaxDimension = maxDimension;
        return this;
    }

    /**
     * With this method the fetched image will be set to the passed ImageView object automatically.
     */
    public ArgazkiRequest to(final ImageView imageView) {
        mImageView = imageView;
        return this;
    }

    /**
     * This method is called when the implementing object requires direct handling of the Bitmap
     * once it is fetched.
     */
    public ArgazkiRequest callback(final ArgazkiListener listener) {
        mCustomListener = listener;
        return this;
    }

    /** Submits this request to the singleton manager for processing. */
    public void go() {
        ArgazkiManager.getInstance().addRequest(this);
    }
}
|
src/elor/leon/android/argazki/ArgazkiRequest.java
|
package elor.leon.android.argazki;
import android.content.Context;
import android.widget.ImageView;
/**
 * A fluent description of a single image-fetch request. Configure it with
 * {@link #from(String)} and {@link #scaleTo(int)}, then call
 * {@link #to(ImageView)} to submit it to the manager.
 */
public class ArgazkiRequest {

    private Context mContext;
    private String mUrl;
    private boolean mScale = false;
    private int mMaxDimension;
    private ImageView mImageView;

    ArgazkiRequest(Context context) {
        // Hold the application context only, never an Activity, to avoid leaks.
        mContext = context.getApplicationContext();
    }

    // --- package-private accessors used by the manager -----------------------

    Context getContext() {
        return mContext;
    }

    String getUrl() {
        return mUrl;
    }

    boolean shouldScale() {
        return mScale;
    }

    int getMaxDimension() {
        return mMaxDimension;
    }

    ImageView getImageView() {
        return mImageView;
    }

    // --- public fluent API ---------------------------------------------------

    /** Sets the URL the image will be fetched from. */
    public ArgazkiRequest from(final String url) {
        mUrl = url;
        return this;
    }

    /** Requests that the fetched image be scaled down to at most maxDimension. */
    public ArgazkiRequest scaleTo(final int maxDimension) {
        mScale = true;
        mMaxDimension = maxDimension;
        return this;
    }

    /** Records the target view and submits this request to the singleton manager. */
    public void to(final ImageView imageView) {
        mImageView = imageView;
        ArgazkiManager.getInstance().addRequest(this);
    }
}
|
Added a callback option as an alternative to providing an ImageView.
|
src/elor/leon/android/argazki/ArgazkiRequest.java
|
Added a callback option as an alternative to providing an ImageView.
|
|
Java
|
apache-2.0
|
aca17b0bb0f7125c0b6293dc8e7ea70fae96a3b8
| 0
|
keithbrown/bptest,xtuml/bptest,xtuml/bptest,keithbrown/bptest,rmulvey/bptest,rmulvey/bptest,xtuml/bptest,rmulvey/bptest,keithbrown/bptest
|
package org.xtuml.bp.test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourceAttributes;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.internal.progress.BlockedJobsDialog;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.common.ModelElement;
import org.xtuml.bp.core.common.NonRootModelElement;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.core.ui.dialogs.ElementSelectionDialog;
import org.xtuml.bp.core.ui.dialogs.ElementSelectionFlatView;
import org.xtuml.bp.test.common.BaseTest;
import org.xtuml.bp.test.common.FailableRunnable;
import org.xtuml.bp.test.common.TestingUtilities;
import org.xtuml.bp.test.common.UITestingUtilities;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.utilities.ui.CanvasUtilities;
import junit.framework.TestCase;
/**
* Contains utility methods related to automated testing of BridgePoint.
*/
@SuppressWarnings("restriction")
public class TestUtil {
    /**
     * This is used to store the text of a dialog just before it is dismissed. It
     * allows us to validate that the text that was in the dialog is what we
     * expected.
     */
    public static String dialogText;

    /**
     * Asynchronously waits for the given duration, during which time the caller is
     * expected to cause a (likely modal) dialog to be displayed, then dismisses
     * that dialog if it is indeed displayed. If it's not displayed, another attempt
     * will be made through a recursive call, up to a certain number of attempts (in
     * case the dialog is never shown).
     */
    public static void dismissDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, true);
    }

    /**
     * Cancels (via the Cancel button) a dialog after waiting for the given time
     */
    public static void cancelDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false);
    }

    /**
     * Answers no (via the No button) a dialog after waiting for the given time.
     * The '&amp;' in the button label marks the SWT mnemonic character.
     */
    public static void noToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&No", null, true);
    }

    /**
     * Answers "Don't Save" (via its button) a dialog after waiting for the given time
     */
    public static void dontSaveToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "Do&n't Save", null, true);
    }

    /**
     * Answers yes (via the Yes button) a dialog after waiting for the given time
     */
    public static void yesToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Yes", null, true);
    }

    /**
     * Answers save (via the Save button) a dialog after waiting for the given time
     */
    public static void saveToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Save", null, true);
    }

    /**
     * Answers OK (via the OK button) a dialog after waiting for the given time
     */
    public static void okToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "OK", null, true);
    }

    /**
     * Same as {@link #okToDialog(long)}, but lets the caller decide whether a
     * failure to find the button should raise an error.
     */
    public static void okToDialog(final long inHowManyMillis, boolean throwException) {
        dismissDialog(inHowManyMillis, 0, false, "OK", null, throwException);
    }

    /**
     * Selects the Next button in the active dialog.
     */
    public static void nextToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Next >", null, false);
    }

    /**
     * Selects the Finish button in the active dialog
     */
    public static void finishToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Finish", null, false);
    }

    /**
     * Selects the Merge button in the active dialog
     */
    public static void mergeToDialog(final long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Merge", null, false);
    }

    /**
     * Selects a tree item in a dialog containing a tree
     */
    public static void selectItemInTreeDialog(final long inHowManyMillis, String treeItem) {
        dismissDialog(inHowManyMillis, 0, false, null, treeItem, false);
    }

    /**
     * Presses Debug in the dialog
     */
    public static void debugToDialog(long inHowManyMillis) {
        dismissDialog(inHowManyMillis, 0, false, "&Debug", null, false);
    }

    /**
     * Select the button on the dialog with the given text
     */
    public static void selectButtonInDialog(final long inHowManyMillis, String buttonName) {
        dismissDialog(inHowManyMillis, 0, false, buttonName, null, true);
    }

    /**
     * Select the button on the dialog with the given text, optionally raising an
     * error if the button cannot be located.
     */
    public static void selectButtonInDialog(final long inHowManyMillis, String buttonName, boolean throwException) {
        dismissDialog(inHowManyMillis, 0, false, buttonName, null, throwException);
    }

    // Convenience overload: no button or tree item is named, so the dialog is
    // either closed outright or cancelled depending on shouldDismiss.
    private static void dismissDialog(final long inHowManyMillis, final int currentRecursionDepth,
            final boolean shouldDismiss) {
        dismissDialog(inHowManyMillis, currentRecursionDepth, shouldDismiss, null, null, true);
    }
    /**
     * Callback handed to {@link #dismissShell(ShellProcessor)}; implementations
     * inspect a newly-appeared shell and return true once they have handled it.
     */
    public interface ShellProcessor {
        public boolean processShell(Shell shell);
    }

    // Set to true once a ShellProcessor has successfully handled a new shell.
    static boolean processed = false;
    // Snapshot of the open shells taken before the action under test; any shell
    // not in this set is treated as the new dialog to process.
    static Shell[] shellsBeforeAction = new Shell[0];

    /**
     * Waits (asynchronously) for a new shell to appear and hands it to the given
     * processor. See {@link #processShell(Shell[], ShellProcessor)}.
     */
    public static void dismissShell(ShellProcessor processor) {
        processShell(null, processor);
    }
    /**
     * A daemon-style worker that serializes shell-processing jobs: queued threads
     * are started one at a time and joined before the next begins, so two dialog
     * handlers can never race against each other.
     */
    public static class ShellProcessorThread extends Thread {
        // FIFO queue of pending shell-processing jobs.
        List<Thread> threads = new ArrayList<Thread>();
        // True while a queued job is actively running (between start() and join()).
        boolean processing = false;

        public ShellProcessorThread(String name) {
            super(name);
        }

        public void addThread(Thread thread) {
            threads.add(thread);
        }

        public void removeThread(Thread thread) {
            threads.remove(thread);
        }

        @Override
        public void run() {
            // Runs for the lifetime of the test session: drain the queue, then
            // poll every 100ms for new work.
            while (true) {
                while (threads.size() > 0) {
                    Thread next = threads.remove(0);
                    processing = true;
                    next.start();
                    try {
                        // Block until this job finishes so jobs never overlap.
                        next.join();
                    } catch (InterruptedException e) {
                        TestCase.fail(e.getMessage());
                    } finally {
                        processing = false;
                    }
                }
                try {
                    sleep(100);
                } catch (InterruptedException e) {
                    TestCase.fail(e.getMessage());
                }
            }
        }

        /** True when no job is queued and none is currently running. */
        public boolean isEmpty() {
            return threads.isEmpty() && processing == false;
        }
    }
    // Single shared worker that serializes all shell-processing jobs; started
    // once when this class is first loaded.
    public static ShellProcessorThread shellProcessorThread = new ShellProcessorThread("Shell Processing");
    static {
        shellProcessorThread.start();
    }
    // Maximum time (ms) a job will poll for a new shell before giving up.
    // Tests may raise this if a dialog takes longer to appear.
    public static long maxRunTime = 2000;
    /**
     * Queues an asynchronous job that polls (up to {@link #maxRunTime} ms) for a
     * shell that was not open before the action under test, and hands the first
     * qualifying one to the given processor.
     *
     * @param shellsBeforeAction the shells open before the action, or null to
     *                           snapshot the currently open shells now
     * @param processor          callback invoked with each candidate new shell
     */
    public static void processShell(Shell[] shellsBeforeAction, ShellProcessor processor) {
        if (shellsBeforeAction == null) {
            // cache the current shells, we will use the knowledge that a new
            // one is imminent
            // to determine when and which shell to close
            TestUtil.shellsBeforeAction = PlatformUI.getWorkbench().getDisplay().getShells();
        } else {
            TestUtil.shellsBeforeAction = shellsBeforeAction;
        }
        // run a new thread which expires after 2 seconds
        // this can be increased if anything takes longer than
        // that to display (otherwise it is used for the case
        // where
        Thread processThread = new Thread(() -> {
            long startTime = System.currentTimeMillis();
            long runTime = System.currentTimeMillis() - startTime;
            processed = false;
            while (runTime < maxRunTime) {
                if (PlatformUI.getWorkbench().getDisplay().isDisposed()) {
                    return;
                }
                // Shell inspection must happen on the UI thread.
                PlatformUI.getWorkbench().getDisplay().syncExec(() -> {
                    Shell[] currentShells = PlatformUI.getWorkbench().getDisplay().getShells();
                    HashSet<Shell> uniqueSet = new HashSet<>(Arrays.asList(TestUtil.shellsBeforeAction));
                    // locate a unique shell in the latest
                    for (int i = currentShells.length; --i >= 0;) {
                        Shell shell = currentShells[i];
                        boolean added = uniqueSet.add(shell);
                        if (added) {
                            // unique shell, test to make sure it is
                            // not a temporary shell during setup of
                            // the one we expect
                            if (!(shell.getData() instanceof Object[])
                                    && !(shell.getData() instanceof ProgressMonitorDialog)
                                    && !(shell.getData() instanceof BlockedJobsDialog)
                                    && (!shell.getText().equals("") || (shell.getText().equals("")
                                            && shell.getData() instanceof WizardDialog))) {
                                // we have our new shell, process as we
                                // did before
                                processed = processor.processShell(shell);
                                if (processed) {
                                    break;
                                }
                            }
                        }
                    }
                    if (!processed) {
                        // Re-baseline so temporary shells seen this pass are
                        // not re-inspected on the next poll.
                        TestUtil.shellsBeforeAction = currentShells;
                    }
                });
                if (processed) {
                    return;
                }
                sleep(50);
                runTime = System.currentTimeMillis() - startTime;
            }
        });
        // add thread to processor
        shellProcessorThread.addThread(processThread);
    }
    // NOTE(review): appears unused within this view of the file — confirm before removing.
    static Shell[] currentShells = null;

    /**
     * Core dialog handler: waits for a new dialog shell, records its label text in
     * {@link #dialogText}, and then either closes it, presses the named button,
     * selects the named tree item, or cancels it.
     *
     * @param inHowManyMillis       unused here; kept for signature compatibility
     * @param currentRecursionDepth unused here; kept for signature compatibility
     * @param shouldDismiss         when true the dialog is simply closed
     * @param button                label of the button to press, or null
     * @param treeItem              label of the tree item to select, or null
     * @param throwException        unused here; kept for signature compatibility
     */
    public static void dismissDialog(final long inHowManyMillis, final int currentRecursionDepth,
            final boolean shouldDismiss, final String button, final String treeItem, final boolean throwException) {
        dismissShell(shell -> {
            dialogText = "";
            // close the dialog
            Control[] ctrls = ((Dialog) shell.getData()).getShell().getChildren();
            for (int i = 0; i < ctrls.length; i++) {
                Control ctrl = ctrls[i];
                if (ctrl instanceof Label) {
                    // Accumulate all label text so tests can verify the message.
                    dialogText = dialogText + ((Label) ctrl).getText();
                }
            }
            if (shouldDismiss) {
                ((Dialog) shell.getData()).close();
            } else if (button != null) {
                Button foundButton = findButton(shell, button);
                if (foundButton != null) {
                    foundButton.setSelection(true);
                    foundButton.notifyListeners(SWT.Selection, null);
                    return true;
                }
            } else if (treeItem != null) {
                Tree tree = UITestingUtilities.findTree(shell);
                if (tree != null) {
                    TreeItem item = UITestingUtilities.findItemInTree(tree, treeItem);
                    if (item != null) {
                        tree.select(item);
                        return true;
                    } else {
                        CorePlugin.logError("Unable to locate tree item in tree: " + treeItem, null);
                    }
                } else {
                    CorePlugin.logError("Unable to locate a tree in the dialog.", null);
                }
            } else {
                // No button or tree item named: fall back to pressing Cancel.
                cancelDialog((Dialog) shell.getData());
            }
            return false;
        });
    }
//
public static void checkTableItems(final long inHowManyMillis, final int currentRecursionDepth,
final boolean shouldDismiss, final String actualResultFilePath) {
dismissShell(shell -> {
if (shell != null && shell.getData() instanceof Dialog) {
// close the dialog
if (!(shell.getData() instanceof ProgressMonitorDialog)) {
Control[] ctrls = ((Dialog) shell.getData()).getShell().getChildren();
for (int i = 0; i < ctrls.length; i++) {
Control ctrl = ctrls[i];
if (ctrl instanceof Label) {
dialogText = dialogText + ((Label) ctrl).getText();
}
}
if (shouldDismiss) {
((Dialog) shell.getData()).close();
return true;
} else {
TableItem[] items = getTableItems(shell);
if (items.length > 0) {
String[] actualFirstColumn = new String[items.length];
String[] actualSecondColumn = new String[items.length];
for (int i = 0; i < items.length; i++) {
actualFirstColumn[i] = items[i].getText(0);
actualSecondColumn[i] = items[i].getText(1);
}
String output = createTable(actualFirstColumn, actualSecondColumn);
try {
FileWriter writer = new FileWriter(actualResultFilePath);
writer.write(output);
writer.flush();
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
Button foundButton = findButton(shell, "OK");
if (foundButton != null) {
foundButton.setSelection(true);
foundButton.notifyListeners(SWT.Selection, null);
while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
;
return true;
}
}
}
}
return false;
});
}
private static String createTable(String[] firstColumn, String[] secondColumn) {
int shorterLenght;
int difference = firstColumn.length - secondColumn.length;
if (difference > 0) {
shorterLenght = secondColumn.length;
} else {
shorterLenght = firstColumn.length;
}
String result = "";
int i = 0;
for (; i < shorterLenght; i++) {
result += firstColumn[i] + "\t" + secondColumn[i] + "\r\n";
}
return result;
}
//
private static void cancelDialog(Dialog dialog) {
Control bb = dialog.buttonBar;
Button cb = findButton(bb.getParent(), "Cancel");
cb.notifyListeners(SWT.Selection, null);
}
public static Button findButton(Composite parent, String buttonName) {
Control[] child_set = parent.getChildren();
for (int i = 0; i < child_set.length; ++i) {
if (child_set[i] instanceof Button) {
Button cc = (Button) child_set[i];
String l = cc.getText();
if (l.equals(buttonName) || l.equals(buttonName.replaceAll("&", ""))) {
return cc;
}
} else if (child_set[i] instanceof Composite) {
Button result = findButton((Composite) child_set[i], buttonName);
if (result != null) {
return result;
}
}
}
return null;
}
public static TableItem[] getTableItems(Composite parent) {
Control[] child_set = parent.getChildren();
for (int i = 0; i < child_set.length; ++i) {
if (child_set[i] instanceof ElementSelectionFlatView) {
ElementSelectionFlatView page = (ElementSelectionFlatView) child_set[i];
Table table = page.getTable();
TableItem[] items = table.getItems();
return items;
} else if (child_set[i] instanceof Table) {
return ((Table) child_set[i]).getItems();
} else if (child_set[i] instanceof Composite) {
TableItem[] result = getTableItems((Composite) child_set[i]);
if (result != null) {
return result;
}
}
}
return null;
}
/**
* A shorthand method for telling the current thread to sleep for the given
* amount of milliseconds.
*/
public static void sleep(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
}
}
/**
* Sleeps the current thread for the given amount of milliseconds, but
* periodically dispatches all pending UI events. Inserting a temporary call to
* this should "pause" the current test for the given duration, while still
* allowing the UI to be seen and manipulated during that time, which may be an
* aid in debugging.
*/
public static void sleepWithDispatchOfEvents(long millis) {
// the given duration must be at least as long as our sleep interval,
// otherwise no sleeping will be done, below
final int sleepInterval = 10;
if (millis < sleepInterval)
millis = sleepInterval;
// for each interval that makes up the given duration
for (int i = 0; i < millis / sleepInterval; i++) {
// sleep for this interval
sleep(sleepInterval);
// dispatch any pending UI events
while (Display.getCurrent().readAndDispatch())
;
}
}
/**
* Copies the given file to a new one at the given destination path. If a file
* is already at that location, it will be overwritten.
*
* For copying an Eclipse IFile, IFile.create() should be used instead of this.
*/
public static void copyFile(File file, String destPath) {
try {
// open streams on the file and the
// destination location
File copy = new File(destPath);
FileInputStream in = new FileInputStream(file);
FileOutputStream out = new FileOutputStream(copy);
// transfer the bytes from the file to the copy
int c;
while ((c = in.read()) != -1)
out.write(c);
// close the streams
in.close();
out.close();
} catch (IOException e) {
CorePlugin.logError("Could not copy file", e);
}
}
    /**
     * Dispatches outstanding events when progress is complete.
     */
    public static class DispatchOnDoneProgressMonitor extends NullProgressMonitor {
        /**
         * Is set to true when the done() method completes.
         */
        public boolean done = false;

        /*
         * (non-Javadoc)
         *
         * @see org.eclipse.core.runtime.IProgressMonitor#done()
         */
        public void done() {
            // Flush all pending UI events before signalling completion, so
            // callers polling 'done' see a fully-settled workbench.
            Display display = Display.getCurrent();
            while (display.readAndDispatch())
                ;
            done = true;
        }
    }
/**
* Copies a class file of the given name from the development workspace project
* of the given name into the given test workspace project.
*/
public static IFile copyClassFileIntoProject(String className, String copyFromProjectName, IProject toProject) {
// locate the required file in the development workspace
File workspaceSource = TestingUtilities.getSourceDirectory().toFile().getParentFile();
File file = new File(workspaceSource, copyFromProjectName + "/bin/lib/" + className + ".class");
IFile resource = toProject.getFile("/bin/lib/" + file.getName());
File copyFile = resource.getLocation().toFile();
// copy the test file into the given project (and wait
// until the resulting model-events have been dispatched,
// before proceeding)
copyFile(file, copyFile.getAbsolutePath());
try {
FileInputStream stream = new FileInputStream(file);
DispatchOnDoneProgressMonitor monitor = new DispatchOnDoneProgressMonitor();
resource.create(stream, true, monitor);
while (!monitor.done)
TestUtil.sleep(10);
stream.close();
} catch (Exception e) {
CorePlugin.logError("Could not copy test class into project. reason: ", e);
}
return resource;
}
/**
* A convenience method for opening the given project.
*/
public static void openProject(IProject project) {
try {
project.open(new NullProgressMonitor());
} catch (CoreException e) {
CorePlugin.logError("Could not open project", e);
}
}
/**
 * A convenience method for closing the given project; failures are logged
 * rather than propagated so test code stays simple.
 */
public static void closeProject(IProject project) {
    final NullProgressMonitor monitor = new NullProgressMonitor();
    try {
        project.close(monitor);
    } catch (CoreException e) {
        CorePlugin.logError("Could not close project", e);
    }
}
/**
 * A convenience method for deleting the given project. The project's contents
 * are kept on disk (deleteContent == false) while the deletion itself is
 * forced (force == true).
 */
public static void deleteProject(IProject project) {
    try {
        project.delete(false, true, new NullProgressMonitor());
    } catch (CoreException e) {
        CorePlugin.logError("Could not delete project", e);
    }
}
/**
 * Makes the BridgePoint perspective the one that is currently active within the
 * Eclipse IDE. Returns the page on which the perspective is shown.
 */
public static IWorkbenchPage showBridgePointPerspective() {
    // simple delegation; the actual perspective switching lives in CanvasUtilities
    return CanvasUtilities.showBridgePointPerspective();
}
/**
 * Is the multi-valued return value of createTestProjectAndImportModel(), below.
 */
public static class Result1 {
    // model root the model was imported into
    public Ooaofooa modelRoot;
    // workspace project used for the test
    public IProject project;
    // the model file inside the project
    public IFile file;
}
/**
 * Changes the given file's readonly status.
 *
 * @param readonly
 *            - A boolean used to determine what status to set the file to
 * @param modelFile
 *            - The file in which the status should be altered
 */
public static void changeFileReadonlyStatus(boolean readonly, IFile modelFile) {
    final ResourceAttributes attributes = modelFile.getResourceAttributes();
    if (attributes != null) {
        attributes.setReadOnly(readonly);
        try {
            modelFile.setResourceAttributes(attributes);
        } catch (CoreException e) {
            CorePlugin.logError("Core Exception", e);
        }
    }
    // always let any resulting model events run, even when the file
    // exposed no attributes to modify
    BaseTest.dispatchEvents();
}
/**
 * Returns the concatenation of the given array of strings into one string, with
 * each of the smaller strings on a new line.
 *
 * Note that any line-feed characters within the strings will be stripped out
 * before writing, to be consistent for comparison purposes with what this
 * class's writeToFile() method does.
 */
public static String join(String[] strings) {
    final String separator = System.getProperty("line.separator");
    StringBuilder joined = new StringBuilder();
    for (int index = 0; index < strings.length; index++) {
        // line-feeds are removed so results compare cleanly against
        // what writeToFile() produces
        if (index > 0) {
            joined.append(separator);
        }
        joined.append(strings[index].replaceAll("\n", ""));
    }
    return joined.toString();
}
/**
 * Returns the result of joining the two arrays of strings together into one
 * array: all of strings1 followed by all of strings2.
 */
public static String[] join(String[] strings1, String[] strings2) {
    // if the second array is empty, just return the first
    if (strings2.length == 0)
        return strings1;
    // bulk-copy both arrays instead of choosing the source
    // element-by-element with a branch inside the loop
    String[] result = new String[strings1.length + strings2.length];
    System.arraycopy(strings1, 0, result, 0, strings1.length);
    System.arraycopy(strings2, 0, result, strings1.length, strings2.length);
    return result;
}
/**
 * Returns the contents of the given text file as a string.
 * Each line is trimmed; see the two-argument overload for untrimmed reads.
 */
public static String getTextFileContents(File file) {
    return getTextFileContents(file, true);
}
/**
 * Returns the contents of the given text file as a string, joining lines with
 * the platform line separator.
 *
 * @param file the text file to read (platform default charset, as before)
 * @param trim whether each line is trimmed before being appended
 * @return the file contents, or null after failing the current test on I/O error
 */
public static String getTextFileContents(File file, boolean trim) {
    // try-with-resources closes the reader even on exception
    // (previously it leaked if readLine() threw)
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file)))) {
        StringBuilder contents = new StringBuilder();
        String lineSeparator = System.getProperty("line.separator");
        int linesRead = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            // add the line to the result we're building
            if (linesRead > 0)
                contents.append(lineSeparator);
            contents.append(trim ? line.trim() : line);
            linesRead++;
        }
        return contents.toString();
    } catch (IOException e) {
        TestCase.fail(e.getLocalizedMessage());
        return null;
    }
}
/**
 * Writes the given array of strings out to a text file of the given name
 * (including path), one string per line.
 *
 * Note that any line-feed characters within the strings will be stripped out
 * before writing, as they tend to complicate comparisons of the contents of the
 * file after they are read back in.
 *
 * @return Whether the write was successful (false also when the file already
 *         holds identical content and is left untouched).
 */
public static boolean writeToFile(String[] strings, String pathName) {
    try {
        // get the current file contents for comparison; if there is no
        // difference then we don't need to update the result
        File resultFile = new File(pathName);
        if (resultFile.exists()) {
            byte[] fileBytes = new byte[(int) resultFile.length()];
            try (FileInputStream fis = new FileInputStream(resultFile)) {
                // loop until the whole file is read: a single read() call
                // may legally return fewer bytes than requested
                int offset = 0;
                while (offset < fileBytes.length) {
                    int count = fis.read(fileBytes, offset, fileBytes.length - offset);
                    if (count == -1) {
                        break;
                    }
                    offset += count;
                }
            }
            String fileContents = new String(fileBytes);
            String[] currentContents = (fileContents.indexOf("\r") != -1) ? fileContents.split("\r\n")
                    : fileContents.split("\n");
            if (stringArraysAreEqual(currentContents, strings)) {
                return false;
            }
        }
        // create the file of the given path and name; try-with-resources
        // closes the writer even if a write throws
        String lineSeparator = System.getProperty("line.separator");
        try (FileWriter writer = new FileWriter(pathName)) {
            for (int i = 0; i < strings.length; i++) {
                // strip out any line-feeds from this string, as they
                // would make it read back as two or more strings,
                // which screws up comparisons
                String string = strings[i].replaceAll("\n", "");
                if (i > 0)
                    writer.write(lineSeparator);
                writer.write(string);
            }
            writer.flush();
        }
    } catch (Exception e) {
        CorePlugin.logError("Could not write strings to text file", e);
        return false;
    }
    return true;
}
/**
 * Compares the two arrays element-by-element, ignoring embedded line-feed
 * characters, and returns true only when every position matches.
 */
private static boolean stringArraysAreEqual(String[] currentContents, String[] strings) {
    // differing lengths can never be equal
    if (currentContents.length != strings.length) {
        return false;
    }
    for (int index = 0; index < strings.length; index++) {
        // line-feeds are stripped on both sides, mirroring writeToFile()
        String expected = strings[index].replaceAll("\n", "");
        String actual = currentContents[index].replaceAll("\n", "");
        if (!actual.equals(expected)) {
            return false;
        }
    }
    return true;
}
/**
 * Convenience overload: select a single item; not locate-only.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, Shell[] existingShells) {
    return chooseItemInDialog(sleep, item, false, existingShells);
}

/**
 * Convenience overload: select (or merely locate) a single item; the item is
 * expected to exist.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, final boolean locateOnly,
        Shell[] existingShells) {
    return chooseItemInDialog(sleep, item, locateOnly, false, existingShells);
}

/**
 * Convenience overload: wraps the single item in an array and delegates to the
 * full multi-item variant (no prerequisite runnable).
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, final boolean locateOnly,
        final boolean testNonExistence, Shell[] existingShells) {
    return chooseItemInDialog(sleep, null, new String[] { item }, locateOnly, testNonExistence, existingShells);
}

/**
 * Convenience overload: toggle the named button with no prerequisite runnable.
 */
public static FailableRunnable toggleButtonInElementSelectionDialog(final int sleep, final String buttonName,
        Shell[] existingShells) {
    return toggleButtonInElementSelectionDialog(sleep, null, buttonName, existingShells);
}

/**
 * Convenience overload: cancel the selection dialog with no prerequisite
 * runnable.
 */
public static void cancelElementSelectionDialog(final int sleep, Shell[] existingShells) {
    cancelElementSelectionDialog(sleep, null, existingShells);
}
/**
 * Closes an open ElementSelectionDialog from a background thread. If a
 * prerequisite runnable is given, the close waits until it has finished.
 * The dialog's shell is located among shells not present in existingShells.
 *
 * NOTE(review): the sleep parameter is currently unused by this overload —
 * confirm whether an initial delay was intended.
 */
public static void cancelElementSelectionDialog(final int sleep, final FailableRunnable waitRunnable,
        Shell[] existingShells) {
    Thread cancelThread = new Thread(new Runnable() {
        @Override
        public void run() {
            if (waitRunnable != null) {
                waitRunnable.join();
            }
            // shell access must happen on the UI thread
            PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        if (shell != null && shell.getData() instanceof ElementSelectionDialog) {
                            ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                            dialog.close();
                        }
                        return true;
                    });
                }
            });
        }
    });
    cancelThread.start();
}
/**
 * Blocks until the given runnable reports completion, polling once per
 * second and giving up after 60 seconds. A null runnable returns at once.
 */
protected static void waitForRunnable(FailableRunnable waitRunnable) {
    if (waitRunnable == null) {
        return;
    }
    int waited = 0;
    // poll for completion, capped at 60 seconds of waiting
    while (!waitRunnable.getComplete() && waited <= 60000) {
        sleep(1000);
        waited += 1000;
    }
}
/**
 * Presses the OK button of an open ElementSelectionDialog from a background
 * thread, after the given prerequisite runnable (if any) has completed. The
 * button is only pressed when it is enabled.
 *
 * NOTE(review): shell.getData() is cast to ElementSelectionDialog without an
 * instanceof check — a different dialog type would raise a
 * ClassCastException; confirm callers only trigger this for selection dialogs.
 */
public static void okElementSelectionDialog(final FailableRunnable runnable, Shell[] existingShells) {
    Thread cancelThread = new Thread(new Runnable() {
        @Override
        public void run() {
            waitForRunnable(runnable);
            processShell(existingShells, shell -> {
                if (shell != null) {
                    ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                    if (dialog.getOkButton().isEnabled()) {
                        dialog.getOkButton().notifyListeners(SWT.Selection, new Event());
                    }
                }
                return true;
            });
        }
    });
    cancelThread.start();
}
/**
 * Convenience overload: single item, no initial sleep; searching starts after
 * the given runnable completes.
 */
public static FailableRunnable chooseItemInDialog(FailableRunnable runnable, String item, boolean locateOnly,
        Shell[] existingShells) {
    return chooseItemInDialog(0, runnable, new String[] { item }, locateOnly, false, existingShells);
}
/**
 * Convenience overload: single item with a prerequisite runnable.
 *
 * Fix: this method previously delegated to itself (the call matched this very
 * signature, since a String does not convert to String[]), causing infinite
 * recursion and a StackOverflowError. The item is now wrapped in an array so
 * overload resolution picks the String[] variant.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final FailableRunnable waitRunnable,
        final String item, final boolean locateOnly, final boolean testNonExistence, Shell[] existingShells) {
    return chooseItemInDialog(sleep, waitRunnable, new String[] { item }, locateOnly, testNonExistence,
            existingShells);
}
// Counts how many of the requested items were found in the dialog's table;
// reset to zero at the end of each search so reruns start clean.
static int foundItemInDialog = 0;

/**
 * Starts a background thread that, after an optional initial sleep and an
 * optional prerequisite runnable, finds the given items in the table of a
 * newly opened dialog and selects them (unless locateOnly is true). When
 * testNonExistence is true the check is inverted: finding every item is
 * reported as a failure. Returns the runnable so callers can wait on it and
 * inspect its failure text.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final FailableRunnable waitRunnable,
        final String[] targetItems, final boolean locateOnly, final boolean testNonExistence,
        Shell[] existingShells) {
    FailableRunnable runnable = new FailableRunnable() {
        @Override
        public void run() {
            sleep(sleep);
            waitForRunnable(waitRunnable);
            FailableRunnable innerRunnable = new FailableRunnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        // track which items were (not) found for the failure message
                        List<String> foundItems = new ArrayList<>();
                        List<String> notFoundItems = new ArrayList<>(Arrays.asList(targetItems));
                        if (shell != null) {
                            Dialog dialog = (Dialog) shell.getData();
                            Control[] children = dialog.getShell().getChildren();
                            for (int i = 0; i < children.length; i++) {
                                Table table = findTable(children);
                                if (table != null) {
                                    // if a deselect all button is present
                                    // press it before selecting the desired
                                    // item
                                    Button deselect = findButton(shell, "&Deselect All");
                                    if (deselect != null) {
                                        deselect.notifyListeners(SWT.Selection, new Event());
                                        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                            ;
                                    }
                                    TableItem[] items = table.getItems();
                                    for (String item : targetItems) {
                                        for (int j = 0; j < items.length; j++) {
                                            if (items[j].getText().equals(item)) {
                                                // do not select if locateOnly is true
                                                if (!locateOnly) {
                                                    // add to (not replace) the current selection
                                                    List<TableItem> currentSelection = new ArrayList<>(
                                                            Arrays.asList(table.getSelection()));
                                                    currentSelection.add(items[j]);
                                                    table.setSelection(currentSelection.toArray(new TableItem[0]));
                                                    Event event = new Event();
                                                    event.item = items[j];
                                                    table.notifyListeners(SWT.Selection, event);
                                                    while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                                        ;
                                                }
                                                foundItemInDialog++;
                                                foundItems.add(item);
                                                notFoundItems.remove(item);
                                                break;
                                            }
                                        }
                                    }
                                    // only the first table found is searched
                                    break;
                                }
                            }
                        }
                        if (testNonExistence) {
                            // inverted check: all items present is the failure
                            if (foundItemInDialog == targetItems.length) {
                                setFailure("Found the unexpected item(s) in the selection dialog ("
                                        + StringUtils.join(foundItems, ", ") + ").");
                            }
                        } else {
                            if (foundItemInDialog != targetItems.length) {
                                setFailure("Could not locate the expected item(s) in the selection dialog ("
                                        + StringUtils.join(notFoundItems, ", ") + ").");
                            }
                        }
                        foundItemInDialog = 0;
                        setComplete();
                        return true;
                    });
                }

                // depth-first search for the first Table in the control tree
                private Table findTable(Control[] children) {
                    for (Control child : children) {
                        if (child instanceof Table) {
                            return (Table) child;
                        } else if (child instanceof Composite) {
                            Table result = findTable(((Composite) child).getChildren());
                            if (result != null) {
                                return result;
                            }
                        }
                    }
                    return null;
                }
            };
            if (!PlatformUI.getWorkbench().getDisplay().isDisposed()) {
                // must be run in the UI thread
                PlatformUI.getWorkbench().getDisplay().syncExec(innerRunnable);
                waitForRunnable(innerRunnable);
                // propagate any failure recorded by the UI-thread runnable
                if (!innerRunnable.getFailure().equals("")) {
                    setFailure(innerRunnable.getFailure());
                }
            }
            setComplete();
        }
    };
    Thread chooserThread = new Thread(runnable);
    chooserThread.start();
    return runnable;
}
/**
 * Starts a background thread that, after an optional sleep and prerequisite
 * runnable, toggles the named button in the flat view of an open
 * ElementSelectionDialog. A failure is recorded on the returned runnable if
 * the button cannot be found.
 */
public static FailableRunnable toggleButtonInElementSelectionDialog(final int sleep,
        final FailableRunnable waitRunnable, final String buttonName, Shell[] existingShells) {
    FailableRunnable runnable = new FailableRunnable() {
        @Override
        public void run() {
            sleep(sleep);
            waitForRunnable(waitRunnable);
            FailableRunnable innerRunnable = new FailableRunnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        if (shell != null) {
                            ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                            ElementSelectionFlatView view = dialog.getFlatView();
                            Control[] children = view.getChildren();
                            for (int i = 0; i < children.length; i++) {
                                if (children[i] instanceof Button) {
                                    Button button = (Button) children[i];
                                    if (button.getText().equals(buttonName)) {
                                        // flip the button state, then fire the
                                        // selection event and repaint the view
                                        button.setSelection((button.getSelection()) ? false : true);
                                        button.notifyListeners(SWT.Selection, new Event());
                                        view.redraw();
                                        view.update();
                                        setComplete();
                                        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                            ;
                                        return true;
                                    }
                                }
                            }
                            // if we get here add an error to the thread, as
                            // the button could not be found
                            setFailure("Unable to locate button in selection dialog: " + buttonName);
                            setComplete();
                        }
                        return true;
                    });
                }
            };
            // must be run in the UI thread
            PlatformUI.getWorkbench().getDisplay().syncExec(innerRunnable);
            // propagate any failure recorded by the UI-thread runnable
            if (!innerRunnable.getFailure().equals("")) {
                setFailure(innerRunnable.getFailure());
            }
            setComplete();
        }
    };
    Thread chooserThread = new Thread(runnable);
    chooserThread.start();
    return runnable;
}
/**
 * Reflectively invokes the named method on the element inside a new,
 * undoable transaction.
 */
public static void executeInTransaction(NonRootModelElement element, String method, Object[] parameters) {
    executeInTransaction(element, method, parameters, true);
}
/**
 * Reflectively invokes the named method on the element inside a transaction
 * spanning the default model and graphics roots. On any failure the
 * transaction is cancelled and the error logged.
 *
 * NOTE(review): a null entry in parameters causes a NullPointerException in
 * the parameter-class loop below, before the transaction starts — confirm
 * callers never pass null arguments.
 */
public static void executeInTransaction(NonRootModelElement element, String method, Object[] parameters,
        boolean undoable) {
    // boxed Integer/Boolean arguments are mapped to their primitive types
    // so getMethod() matches methods declared with int/boolean parameters
    Class<?>[] paramClasses = new Class<?>[parameters.length];
    for (int i = 0; i < parameters.length; i++) {
        if (parameters[i] instanceof Integer) {
            paramClasses[i] = Integer.TYPE;
        } else if (parameters[i] instanceof Boolean) {
            paramClasses[i] = Boolean.TYPE;
        } else {
            paramClasses[i] = parameters[i].getClass();
        }
    }
    Transaction transaction = null;
    TransactionManager manager = TransactionManager.getSingleton();
    try {
        transaction = manager.startTransaction("test transaction",
                new ModelElement[] { Ooaofooa.getDefaultInstance(), Ooaofgraphics.getDefaultInstance() }, undoable);
        Method m = element.getClass().getMethod(method, paramClasses);
        m.invoke(element, parameters);
        manager.endTransaction(transaction);
    } catch (Exception e) {
        // cancel only if the transaction was actually started
        if (transaction != null) {
            manager.cancelTransaction(transaction, e);
        }
        CorePlugin.logError("Unable to complete transaction.", e);
    }
    // let model-change listeners run before returning to the test
    BaseTest.dispatchEvents(200);
}
}
|
src/org.xtuml.bp.test/src/org/xtuml/bp/test/TestUtil.java
|
//=====================================================================
//
//File: $RCSfile: TestUtil.java,v $
//Version: $Revision: 1.38 $
//Modified: $Date: 2013/05/10 05:37:49 $
//
//(c) Copyright 2004-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
package org.xtuml.bp.test;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.ResourceAttributes;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.jgit.util.StringUtils;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Tree;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.internal.progress.BlockedJobsDialog;
import org.xtuml.bp.core.CorePlugin;
import org.xtuml.bp.core.Ooaofooa;
import org.xtuml.bp.core.common.ModelElement;
import org.xtuml.bp.core.common.NonRootModelElement;
import org.xtuml.bp.core.common.Transaction;
import org.xtuml.bp.core.common.TransactionManager;
import org.xtuml.bp.core.ui.dialogs.ElementSelectionDialog;
import org.xtuml.bp.core.ui.dialogs.ElementSelectionFlatView;
import org.xtuml.bp.test.common.BaseTest;
import org.xtuml.bp.test.common.FailableRunnable;
import org.xtuml.bp.test.common.TestingUtilities;
import org.xtuml.bp.test.common.UITestingUtilities;
import org.xtuml.bp.ui.canvas.Ooaofgraphics;
import org.xtuml.bp.utilities.ui.CanvasUtilities;
import junit.framework.TestCase;
/**
* Contains utility methods related to automated testing of BridgePoint.
*/
@SuppressWarnings("restriction")
public class TestUtil
{
/**
* This is used to store the text of a dialog just before it is dismissed.
* It allows us to validate that the text that was in the dialog is what we expected.
*/
public static String dialogText;
/**
* Asynchronously waits for the given duration, during which time the caller is expected
* to cause a (likely modal) dialog to be displayed, then dismisses that dialog if it
* is indeed displayed. If it's not displayed, another attempt will be made through
* a recursive call, up to a certain number of attempts (in case the dialog is never
* shown).
*/
public static void dismissDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, true);
}
/**
* Cancels (via the Cancel button) a dialog after waiting for the given time
*/
public static void cancelDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false);
}
/**
* Answers no (via the No button) a dialog after waiting for the given time
*/
public static void noToDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false, "&No", null, true);
}
public static void dontSaveToDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false, "Do&n't Save", null, true);
}
/**
* Answers yes (via the Yes button) a dialog after waiting for the given time
*/
public static void yesToDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false, "&Yes", null, true);
}
/**
* Answers save (via the Save button) a dialog after waiting for the given time
*/
public static void saveToDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false, "&Save", null, true);
}
/**
* Answers OK (via the OK button) a dialog after waiting for the given time
*/
public static void okToDialog(final long inHowManyMillis)
{
dismissDialog(inHowManyMillis, 0, false, "OK", null, true);
}
public static void okToDialog(final long inHowManyMillis, boolean throwException)
{
dismissDialog(inHowManyMillis, 0, false, "OK", null, throwException);
}
/**
* Selects the Next button in the active dialog.
*/
public static void nextToDialog(final long inHowManyMillis) {
dismissDialog(inHowManyMillis, 0, false, "&Next >", null, false);
}
/**
* Selects the Finish button in the active dialog
*/
public static void finishToDialog(final long inHowManyMillis) {
dismissDialog(inHowManyMillis, 0, false, "&Finish", null, false);
}
/**
* Selects the Finish button in the active dialog
*/
public static void mergeToDialog(final long inHowManyMillis) {
dismissDialog(inHowManyMillis, 0, false, "&Merge", null, false);
}
/**
* Selects a tree item in a dialog containing a tree
*/
public static void selectItemInTreeDialog(final long inHowManyMillis, String treeItem) {
dismissDialog(inHowManyMillis, 0, false, null, treeItem, false);
}
/**
* Presses Debug in the dialog
*/
public static void debugToDialog(long inHowManyMillis) {
dismissDialog(inHowManyMillis, 0, false, "&Debug", null, false);
}
/**
* Select the button on the dialog with the given text
*/
public static void selectButtonInDialog(final long inHowManyMillis, String buttonName)
{
dismissDialog(inHowManyMillis, 0, false, buttonName, null, true);
}
/**
* Select the button on the dialog with the given text
*/
public static void selectButtonInDialog(final long inHowManyMillis, String buttonName, boolean throwException)
{
dismissDialog(inHowManyMillis, 0, false, buttonName, null, throwException);
}
private static void dismissDialog(final long inHowManyMillis,
final int currentRecursionDepth, final boolean shouldDismiss) {
dismissDialog(inHowManyMillis, currentRecursionDepth, shouldDismiss, null, null, true);
}
public interface ShellProcessor {
public boolean processShell(Shell shell);
}
static boolean processed = false;
static Shell[] shellsBeforeAction = new Shell[0];
public static void dismissShell(ShellProcessor processor) {
processShell(null, processor);
}
public static class ShellProcessorThread extends Thread {
List<Thread> threads = new ArrayList<Thread>();
boolean processing = false;
public ShellProcessorThread(String name) {
super(name);
}
public void addThread(Thread thread) {
threads.add(thread);
}
public void removeThread(Thread thread) {
threads.remove(thread);
}
@Override
public void run() {
while(true) {
while(threads.size() > 0) {
Thread next = threads.remove(0);
processing = true;
next.start();
try {
next.join();
} catch (InterruptedException e) {
TestCase.fail(e.getMessage());
} finally {
processing = false;
}
}
try {
sleep(100);
} catch (InterruptedException e) {
TestCase.fail(e.getMessage());
}
}
}
public boolean isEmpty() {
return threads.isEmpty() && processing == false;
}
}
public static ShellProcessorThread shellProcessorThread = new ShellProcessorThread("Shell Processing");
static {
shellProcessorThread.start();
}
public static long maxRunTime = 2000;
public static void processShell(Shell[] shellsBeforeAction, ShellProcessor processor)
{
if (shellsBeforeAction == null) {
// cache the current shells, we will use the knowledge that a new
// one is imminent
// to determine when and which shell to close
TestUtil.shellsBeforeAction = PlatformUI.getWorkbench().getDisplay().getShells();
} else {
TestUtil.shellsBeforeAction = shellsBeforeAction;
}
// run a new thread which expires after 2 seconds
// this can be increased if anything takes longer than
// that to display (otherwise it is used for the case
// where
Thread processThread = new Thread(() -> {
long startTime = System.currentTimeMillis();
long runTime = System.currentTimeMillis() - startTime;
processed = false;
while (runTime < maxRunTime) {
if (PlatformUI.getWorkbench().getDisplay().isDisposed()) {
return;
}
PlatformUI.getWorkbench().getDisplay().syncExec(() -> {
Shell[] currentShells = PlatformUI.getWorkbench().getDisplay().getShells();
HashSet<Shell> uniqueSet = new HashSet<>(Arrays.asList(TestUtil.shellsBeforeAction));
// locate a unique shell in the latest
for (int i = currentShells.length; --i >= 0;) {
Shell shell = currentShells[i];
boolean added = uniqueSet.add(shell);
if (added) {
// unique shell, test to make sure it is
// not a temporary shell during setup of
// the one we expect
if (!(shell.getData() instanceof Object[])
&& !(shell.getData() instanceof ProgressMonitorDialog)
&& !(shell.getData() instanceof BlockedJobsDialog)
&& (!shell.getText().equals("") || (shell.getText().equals("")
&& shell.getData() instanceof WizardDialog))) {
// we have our new shell, process as we
// did before
processed = processor.processShell(shell);
if (processed) {
break;
}
}
}
}
if (!processed) {
TestUtil.shellsBeforeAction = currentShells;
}
});
if (processed) {
return;
}
sleep(50);
runTime = System.currentTimeMillis() - startTime;
}
});
// add thread to processor
shellProcessorThread.addThread(processThread);
}
static Shell[] currentShells = null;
public static void dismissDialog(final long inHowManyMillis,
final int currentRecursionDepth, final boolean shouldDismiss, final String button, final String treeItem, final boolean throwException)
{
dismissShell(shell -> {
dialogText = "";
// close the dialog
Control[] ctrls = ((Dialog) shell.getData()).getShell().getChildren();
for (int i = 0; i < ctrls.length; i++) {
Control ctrl = ctrls[i];
if (ctrl instanceof Label) {
dialogText = dialogText + ((Label) ctrl).getText();
}
}
if (shouldDismiss) {
((Dialog) shell.getData()).close();
} else if (button != null) {
Button foundButton = findButton(shell, button);
if (foundButton != null) {
foundButton.setSelection(true);
foundButton.notifyListeners(SWT.Selection, null);
return true;
}
} else if (treeItem != null) {
Tree tree = UITestingUtilities.findTree(shell);
if (tree != null) {
TreeItem item = UITestingUtilities.findItemInTree(tree, treeItem);
if (item != null) {
tree.select(item);
return true;
} else {
CorePlugin.logError("Unable to locate tree item in tree: " + treeItem, null);
}
} else {
CorePlugin.logError("Unable to locate a tree in the dialog.", null);
}
} else {
cancelDialog((Dialog) shell.getData());
}
return false;
});
}
//
public static void checkTableItems(final long inHowManyMillis,
final int currentRecursionDepth, final boolean shouldDismiss, final String actualResultFilePath )
{
dismissShell(shell -> {
if (shell != null && shell.getData() instanceof Dialog) {
// close the dialog
if (!(shell.getData() instanceof ProgressMonitorDialog)) {
Control[] ctrls = ((Dialog) shell.getData()).getShell().getChildren();
for (int i = 0; i < ctrls.length; i++) {
Control ctrl = ctrls[i];
if (ctrl instanceof Label) {
dialogText = dialogText + ((Label) ctrl).getText();
}
}
if (shouldDismiss) {
((Dialog) shell.getData()).close();
return true;
} else {
TableItem[] items = getTableItems(shell);
if (items.length > 0) {
String[] actualFirstColumn = new String[items.length];
String[] actualSecondColumn = new String[items.length];
for (int i = 0; i < items.length; i++) {
actualFirstColumn[i] = items[i].getText(0);
actualSecondColumn[i] = items[i].getText(1);
}
String output = createTable(actualFirstColumn, actualSecondColumn);
try {
FileWriter writer = new FileWriter(actualResultFilePath);
writer.write(output);
writer.flush();
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
Button foundButton = findButton(shell, "OK");
if (foundButton != null) {
foundButton.setSelection(true);
foundButton.notifyListeners(SWT.Selection, null);
while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
;
return true;
}
}
}
}
return false;
});
}
/**
 * Renders the two columns as tab-separated rows, one CR/LF-terminated line
 * per row. Only as many rows as the shorter column are emitted; extra
 * entries in the longer column are dropped (NOTE(review): presumably
 * intentional — confirm with callers).
 */
private static String createTable(String[] firstColumn,
        String[] secondColumn) {
    int rowCount = Math.min(firstColumn.length, secondColumn.length);
    // StringBuilder instead of repeated String concatenation in the loop
    StringBuilder result = new StringBuilder();
    for (int i = 0; i < rowCount; i++) {
        result.append(firstColumn[i]).append("\t").append(secondColumn[i]).append("\r\n");
    }
    return result.toString();
}
//
// Presses the dialog's Cancel button programmatically.
// NOTE(review): assumes the button bar exists and a button labeled
// "Cancel" is present — an NPE results otherwise; confirm callers
// guarantee this.
private static void cancelDialog(Dialog dialog) {
    Control bb = dialog.buttonBar;
    Button cb = findButton(bb.getParent(), "Cancel");
    cb.notifyListeners(SWT.Selection, null);
}
/**
 * Recursively searches the given composite for a button whose label matches
 * the given name, either exactly or with all '&' mnemonic markers removed.
 * Returns null when no such button exists in the hierarchy.
 */
public static Button findButton(Composite parent, String buttonName) {
    final String plainName = buttonName.replaceAll("&", "");
    for (Control child : parent.getChildren()) {
        if (child instanceof Button) {
            Button candidate = (Button) child;
            String label = candidate.getText();
            if (label.equals(buttonName) || label.equals(plainName)) {
                return candidate;
            }
        } else if (child instanceof Composite) {
            // descend into nested composites
            Button nested = findButton((Composite) child, buttonName);
            if (nested != null) {
                return nested;
            }
        }
    }
    return null;
}
/**
 * Recursively searches the given composite for an element-selection flat
 * view or a plain table and returns the items of the first one found, or
 * null when no table exists anywhere in the hierarchy.
 */
public static TableItem[] getTableItems(Composite parent) {
    for (Control child : parent.getChildren()) {
        if (child instanceof ElementSelectionFlatView) {
            // the flat view wraps its own table
            return ((ElementSelectionFlatView) child).getTable().getItems();
        }
        if (child instanceof Table) {
            return ((Table) child).getItems();
        }
        if (child instanceof Composite) {
            TableItem[] nested = getTableItems((Composite) child);
            if (nested != null) {
                return nested;
            }
        }
    }
    return null;
}
/**
 * A shorthand method for telling the current thread to sleep for the given
 * amount of milliseconds.
 */
public static void sleep(long millis)
{
    try {
        Thread.sleep(millis);
    } catch (InterruptedException e) {
        // NOTE(review): the interrupt is deliberately swallowed so that test
        // pauses are best-effort; the thread's interrupt flag is NOT
        // restored here — confirm no caller relies on interruption.
    }
}
/**
 * Sleeps the current thread for the given amount of milliseconds, but
 * periodically dispatches all pending UI events. Inserting a temporary
 * call to this should "pause" the current test for the given duration,
 * while still allowing the UI to be seen and manipulated during that time,
 * which may be an aid in debugging.
 */
public static void sleepWithDispatchOfEvents(long millis)
{
    final int interval = 10;
    // a shorter duration than one interval would never sleep at all,
    // so round it up to a single interval
    long duration = (millis < interval) ? interval : millis;
    long iterations = duration / interval;
    for (long i = 0; i < iterations; i++) {
        sleep(interval);
        // keep the UI responsive while paused
        while (Display.getCurrent().readAndDispatch()) {
            // drain all pending events
        }
    }
}
/**
 * Copies the given file to a new one at the given
 * destination path. If a file is already at that
 * location, it will be overwritten.
 *
 * For copying an Eclipse IFile, IFile.create() should
 * be used instead of this.
 */
public static void copyFile(File file, String destPath)
{
    // try-with-resources closes both streams even on failure
    // (previously they leaked if an IOException occurred mid-copy)
    try (FileInputStream in = new FileInputStream(file);
            FileOutputStream out = new FileOutputStream(destPath)) {
        // copy in chunks rather than one byte at a time
        byte[] buffer = new byte[8192];
        int count;
        while ((count = in.read(buffer)) != -1) {
            out.write(buffer, 0, count);
        }
    } catch (IOException e) {
        CorePlugin.logError("Could not copy file", e);
    }
}
/**
* Dispatches outstanding events when progress is complete.
*/
public static class DispatchOnDoneProgressMonitor extends NullProgressMonitor
{
/**
* Is set to true when the done() method completes.
*/
public boolean done = false;
/* (non-Javadoc)
* @see org.eclipse.core.runtime.IProgressMonitor#done()
*/
public void done() {
Display display = Display.getCurrent();
while (display.readAndDispatch());
done = true;
}
}
/**
 * Copies a class file of the given name from the development
 * workspace project of the given name into the given test
 * workspace project.
 *
 * @return the workspace handle of the copied class file
 */
public static IFile copyClassFileIntoProject(String className,
    String copyFromProjectName, IProject toProject)
{
    // locate the required file in the development workspace
    File workspaceSource = TestingUtilities.getSourceDirectory().
        toFile().getParentFile();
    File file = new File(workspaceSource, copyFromProjectName +
        "/bin/lib/" + className + ".class");
    IFile resource = toProject.getFile("/bin/lib/"
        + file.getName());
    File copyFile = resource.getLocation().toFile();
    // copy the test file into the given project (and wait
    // until the resulting model-events have been dispatched,
    // before proceeding)
    copyFile(file, copyFile.getAbsolutePath());
    // try-with-resources guarantees the stream is closed even when
    // resource.create() throws (it previously leaked on exception)
    try (FileInputStream stream = new FileInputStream(file)) {
        DispatchOnDoneProgressMonitor monitor =
            new DispatchOnDoneProgressMonitor();
        resource.create(stream, true, monitor);
        while (!monitor.done) TestUtil.sleep(10);
    } catch (Exception e) {
        CorePlugin.logError("Could not copy test class into project. reason: ", e);
    }
    return resource;
}
/**
 * A convenience method for opening the given project.
 * Any failure is logged rather than propagated to the caller.
 */
public static void openProject(IProject project)
{
    NullProgressMonitor monitor = new NullProgressMonitor();
    try {
        project.open(monitor);
    } catch (CoreException openFailure) {
        CorePlugin.logError("Could not open project", openFailure);
    }
}
/**
 * A convenience method for closing the given project.
 * Any failure is logged rather than propagated to the caller.
 */
public static void closeProject(IProject project)
{
    NullProgressMonitor monitor = new NullProgressMonitor();
    try {
        project.close(monitor);
    } catch (CoreException closeFailure) {
        CorePlugin.logError("Could not close project", closeFailure);
    }
}
/**
 * A convenience method for deleting the given project.
 * Any failure is logged rather than propagated to the caller.
 */
public static void deleteProject(IProject project)
{
    try {
        // flags passed as (false, true) -- see the
        // IProject.delete(boolean, boolean, IProgressMonitor) javadoc for
        // the exact deleteContent/force semantics of each position
        project.delete(false, true, new NullProgressMonitor());
    } catch (CoreException e) {
        CorePlugin.logError("Could not delete project", e);
    }
}
/**
 * Makes the BridgePoint perspective the one that is currently active
 * within the Eclipse IDE. Returns the page on which the perspective
 * is shown.
 *
 * Simply delegates to CanvasUtilities.showBridgePointPerspective().
 */
public static IWorkbenchPage showBridgePointPerspective()
{
    return CanvasUtilities.showBridgePointPerspective();
}
/**
 * Is the multi-valued return value of
 * createTestProjectAndImportModel(), below.
 * (Each field is populated by that method.)
 */
public static class Result1
{
    // model root resulting from the import
    public Ooaofooa modelRoot;
    // workspace project that was created
    public IProject project;
    // the imported model file within that project
    public IFile file;
}
/**
 * Changes the given file's readonly status
 *
 * @param readonly - A boolean used to determine what status to set the file
 * to
 * @param modelFile - The file in which the status should be altered
 */
public static void changeFileReadonlyStatus(boolean readonly, IFile modelFile) {
    // nothing to update when no attributes are available for this resource
    ResourceAttributes resourceAttributes = modelFile
        .getResourceAttributes();
    if (resourceAttributes != null) {
        resourceAttributes.setReadOnly(readonly);
        try {
            modelFile.setResourceAttributes(resourceAttributes);
        } catch (CoreException e) {
            CorePlugin.logError("Core Exception", e);
        }
    }
    // flush any resource-change notifications triggered by the attribute
    // update before returning control to the test
    BaseTest.dispatchEvents();
}
/**
 * Returns the concatenation of the given array of strings into one string,
 * with each of the smaller strings on a new line.
 *
 * Note that any line-feed characters within the strings will be
 * stripped out before writing, to be consistent for comparison
 * purposes with what this class's writeToFile() method does.
 *
 * @param strings the lines to join; an empty array yields ""
 * @return the strings, each stripped of '\n', separated by the platform
 *         line separator
 */
public static String join(String[] strings)
{
    // StringBuilder replaces the legacy synchronized StringBuffer, and
    // replace() replaces replaceAll() since no regex is needed
    String lineSeparator = System.getProperty("line.separator");
    StringBuilder buffer = new StringBuilder();
    for (int i = 0; i < strings.length; i++) {
        // separator goes between lines only, never before the first
        if (i > 0) buffer.append(lineSeparator);
        // strip embedded line-feeds, mirroring writeToFile()
        buffer.append(strings[i].replace("\n", ""));
    }
    return buffer.toString();
}
/**
 * Returns the result of joining the two arrays of strings together
 * into one array.
 *
 * @param strings1 leading elements of the result
 * @param strings2 trailing elements of the result
 * @return a new array holding strings1 followed by strings2; when
 *         strings2 is empty, strings1 itself is returned unchanged
 */
public static String[] join(String[] strings1, String[] strings2)
{
    // if the second array is empty, just return the first
    if (strings2.length == 0) return strings1;
    // copy the first array into an enlarged result, then block-copy the
    // second array after it (replaces the element-by-element loop)
    String[] result = Arrays.copyOf(strings1, strings1.length + strings2.length);
    System.arraycopy(strings2, 0, result, strings1.length, strings2.length);
    return result;
}
/**
 * Returns the contents of the given text file as a string, with each
 * line trimmed of surrounding whitespace.
 */
public static String getTextFileContents(File file) {
    return getTextFileContents(file, true);
}

/**
 * Returns the contents of the given text file as a string.
 *
 * @param file the text file to read
 * @param trim whether each line is trimmed of surrounding whitespace
 * @return the file contents with lines re-joined by the platform line
 *         separator, or null after failing the current test on I/O error
 */
public static String getTextFileContents(File file, boolean trim)
{
    // try-with-resources closes the reader even on error; the original
    // leaked it when an IOException was thrown mid-read
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(file)))) {
        StringBuilder contents = new StringBuilder();
        String lineSeparator = System.getProperty("line.separator");
        int linesRead = 0;
        String line;
        // accumulate the file line by line, re-joining with the platform
        // separator (this normalizes the file's own line endings)
        while ((line = reader.readLine()) != null) {
            if (linesRead > 0) contents.append(lineSeparator);
            contents.append(trim ? line.trim() : line);
            linesRead++;
        }
        return contents.toString();
    } catch (IOException e) {
        // fail the running test; return null only so the code compiles
        TestCase.fail(e.getLocalizedMessage());
        return null;
    }
}
/**
 * Writes the given array of strings out to a text file of the given name
 * (including path), one string per line.
 *
 * Note that any line-feed characters within the strings will be
 * stripped out before writing, as they tend to complicate comparisons
 * of the contents of the file after they are read back in.
 *
 * If the file already exists with identical contents it is left
 * untouched and false is returned.
 *
 * @return Whether the write was successful (false also when the file
 *         already matched and no write was needed).
 */
public static boolean writeToFile(String[] strings, String pathName)
{
    try {
        // get the current file contents for comparison; if there is no
        // difference then we don't need to update the result
        File resultFile = new File(pathName);
        if (resultFile.exists()) {
            byte[] fileBytes = new byte[(int) resultFile.length()];
            // a single read() call may legally return fewer bytes than
            // requested, so loop until the buffer is full (the original
            // issued one unchecked read); try-with-resources closes the
            // stream even on error
            try (FileInputStream fis = new FileInputStream(resultFile)) {
                int offset = 0;
                while (offset < fileBytes.length) {
                    int count = fis.read(fileBytes, offset,
                        fileBytes.length - offset);
                    if (count == -1) break;
                    offset += count;
                }
            }
            String fileContents = new String(fileBytes);
            // split on either DOS or Unix line endings
            String[] currentContents = (fileContents.indexOf("\r") != -1)
                ? fileContents.split("\r\n")
                : fileContents.split("\n");
            if (stringArraysAreEqual(currentContents, strings)) {
                return false;
            }
        }
        // create the file of the given path and name; try-with-resources
        // flushes and closes the writer even if a write fails
        try (FileWriter writer = new FileWriter(pathName)) {
            String lineSeparator = System.getProperty("line.separator");
            for (int i = 0; i < strings.length; i++) {
                // strip out any line-feeds from this string, as they
                // would be read back as separate lines and break
                // comparisons
                if (i > 0) writer.write(lineSeparator);
                writer.write(strings[i].replaceAll("\n", ""));
            }
        }
    }
    catch (Exception e) {
        CorePlugin.logError("Could not write strings to text file", e);
        return false;
    }
    return true;
}
/**
 * Compares two string arrays element-by-element, ignoring any embedded
 * line-feed characters (consistent with how writeToFile() strips them
 * before writing).
 */
private static boolean stringArraysAreEqual(String[] currentContents, String[] strings) {
    // arrays of different lengths can never be equal
    if (currentContents.length != strings.length) {
        return false;
    }
    // compare every pair after dropping line-feeds from both sides
    for (int index = 0; index < strings.length; index++) {
        String expected = strings[index].replaceAll("\n", "");
        String actual = currentContents[index].replaceAll("\n", "");
        if (!actual.equals(expected)) {
            return false;
        }
    }
    return true;
}
/**
 * Overload: selects the single given item in the active dialog.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, Shell[] existingShells) {
    return chooseItemInDialog(sleep, item, false, existingShells);
}

/**
 * Overload: when locateOnly is true the item is only verified to exist
 * in the dialog's table, not selected.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, final boolean locateOnly, Shell[] existingShells) {
    return chooseItemInDialog(sleep, item, locateOnly, false, existingShells);
}

/**
 * Overload: when testNonExistence is true the runnable records a failure
 * if the item IS found (see the String[] variant below).
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final String item, final boolean locateOnly, final boolean testNonExistence, Shell[] existingShells) {
    return chooseItemInDialog(sleep, null, new String[] {item}, locateOnly, testNonExistence, existingShells);
}

/**
 * Overload of toggleButtonInElementSelectionDialog() with no gating
 * runnable to wait for.
 */
public static FailableRunnable toggleButtonInElementSelectionDialog(final int sleep, final String buttonName, Shell[] existingShells) {
    return toggleButtonInElementSelectionDialog(sleep, null, buttonName, existingShells);
}

/**
 * Overload of cancelElementSelectionDialog() with no gating runnable.
 */
public static void cancelElementSelectionDialog(final int sleep, Shell[] existingShells) {
    cancelElementSelectionDialog(sleep, null, existingShells);
}
/**
 * Closes the open ElementSelectionDialog from a background thread,
 * optionally after the given runnable has finished.
 *
 * @param sleep        NOTE(review): accepted but never read in this
 *                     method -- confirm callers do not expect a delay
 * @param waitRunnable optional gate; when non-null the cancel thread
 *                     joins it before closing the dialog
 */
public static void cancelElementSelectionDialog(final int sleep, final FailableRunnable waitRunnable, Shell[] existingShells) {
    Thread cancelThread = new Thread(new Runnable() {
        @Override
        public void run() {
            if(waitRunnable != null) {
                waitRunnable.join();
            }
            // the dialog must be closed on the UI thread
            PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        // only close shells that actually host an
                        // ElementSelectionDialog
                        if (shell != null && shell.getData() instanceof ElementSelectionDialog) {
                            ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                            dialog.close();
                        }
                        return true;
                    });
                }
            });
        }
    });
    cancelThread.start();
}
/**
 * Blocks until the given runnable reports completion, polling once per
 * second and giving up after roughly 60 seconds. A null runnable is a
 * no-op.
 */
protected static void waitForRunnable(FailableRunnable waitRunnable) {
    // nothing to wait on
    if (waitRunnable == null) {
        return;
    }
    // poll once a second; stop as soon as the runnable completes or the
    // 60-second budget is exhausted
    for (int waited = 0; !waitRunnable.getComplete() && waited <= 60000; waited += 1000) {
        sleep(1000);
    }
}
/**
 * Presses the OK button of the open ElementSelectionDialog from a
 * background thread, once the given runnable (if any) has completed.
 * The button is only pressed when it is enabled.
 *
 * NOTE(review): unlike cancelElementSelectionDialog(), processShell is
 * invoked directly on the background thread rather than via
 * Display.syncExec -- confirm that widget access here is thread-safe.
 */
public static void okElementSelectionDialog(final FailableRunnable runnable, Shell[] existingShells) {
    Thread cancelThread = new Thread(new Runnable() {
        @Override
        public void run() {
            waitForRunnable(runnable);
            processShell(existingShells, shell -> {
                if (shell != null) {
                    ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                    // simulate a click only when the dialog allows it
                    if (dialog.getOkButton().isEnabled()) {
                        dialog.getOkButton().notifyListeners(SWT.Selection, new Event());
                    }
                }
                return true;
            });
        }
    });
    cancelThread.start();
}
/**
 * Overload: no initial delay; waits on the given runnable before
 * locating/selecting the single item.
 */
public static FailableRunnable chooseItemInDialog(
    FailableRunnable runnable, String item, boolean locateOnly, Shell[] existingShells) {
    return chooseItemInDialog(0, runnable, new String[]{item}, locateOnly, false, existingShells);
}
/**
 * Single-item overload of the String[]-based chooser below.
 *
 * Bug fix: the previous implementation called itself with identical
 * arguments -- the String overload is the most specific match for a
 * String argument -- so it recursed until StackOverflowError. It now
 * forwards to the String[] overload as intended.
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final FailableRunnable waitRunnable,
        final String item, final boolean locateOnly, final boolean testNonExistence, Shell[] existingShells) {
    return chooseItemInDialog(sleep, waitRunnable, new String[] { item }, locateOnly, testNonExistence,
        existingShells);
}
// Running count of target items found by chooseItemInDialog(); reset to
// zero at the end of each invocation.
// NOTE(review): mutable static state -- two concurrent choosers would
// race; confirm tests never run more than one at a time.
static int foundItemInDialog = 0;

/**
 * Returns a started background runnable that selects (or merely locates,
 * when locateOnly is true) the given items in the table of the active
 * selection dialog.
 *
 * @param sleep            initial delay in milliseconds before acting
 * @param waitRunnable     optional gate that must complete first
 * @param targetItems      table-item labels to locate/select
 * @param locateOnly       when true, items are only verified present
 * @param testNonExistence when true, finding ALL items is the failure
 * @param existingShells   shells open before the dialog appeared
 */
public static FailableRunnable chooseItemInDialog(final int sleep, final FailableRunnable waitRunnable,
        final String[] targetItems, final boolean locateOnly, final boolean testNonExistence, Shell[] existingShells) {
    FailableRunnable runnable = new FailableRunnable() {
        @Override
        public void run() {
            // honor the requested delay, then wait for the gating runnable
            sleep(sleep);
            waitForRunnable(waitRunnable);
            FailableRunnable innerRunnable = new FailableRunnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        List<String> foundItems = new ArrayList<>();
                        List<String> notFoundItems = new ArrayList<>(Arrays.asList(targetItems));
                        if (shell != null) {
                            Dialog dialog = (Dialog) shell.getData();
                            Control[] children = dialog.getShell().getChildren();
                            for (int i = 0; i < children.length; i++) {
                                Table table = findTable(children);
                                if (table != null) {
                                    // if a deselect all button is present
                                    // press it before selecting the desired
                                    // item
                                    Button deselect = findButton(shell, "&Deselect All");
                                    if (deselect != null) {
                                        deselect.notifyListeners(SWT.Selection, new Event());
                                        // flush the UI event queue
                                        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                            ;
                                    }
                                    TableItem[] items = table.getItems();
                                    for (String item : targetItems) {
                                        for (int j = 0; j < items.length; j++) {
                                            if (items[j].getText().equals(item)) {
                                                // do not select if locateOnly is true
                                                if (!locateOnly) {
                                                    // extend the current selection with this
                                                    // row and fire a selection event so the
                                                    // dialog's listeners react
                                                    List<TableItem> currentSelection = new ArrayList<>(Arrays.asList(table.getSelection()));
                                                    currentSelection.add(items[j]);
                                                    table.setSelection(currentSelection.toArray(new TableItem[0]));
                                                    Event event = new Event();
                                                    event.item = items[j];
                                                    table.notifyListeners(SWT.Selection, event);
                                                    while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                                        ;
                                                }
                                                foundItemInDialog++;
                                                foundItems.add(item);
                                                notFoundItems.remove(item);
                                                break;
                                            }
                                        }
                                    }
                                    break;
                                }
                            }
                        }
                        // record success/failure depending on whether the
                        // items were expected to exist
                        if (testNonExistence) {
                            if (foundItemInDialog == targetItems.length) {
                                setFailure("Found the unexpected item(s) in the selection dialog (" + StringUtils.join(foundItems, ", ") + ").");
                            }
                        } else {
                            if (foundItemInDialog != targetItems.length) {
                                setFailure("Could not locate the expected item(s) in the selection dialog (" + StringUtils.join(notFoundItems, ", ")
                                    + ").");
                            }
                        }
                        // reset the shared counter for the next invocation
                        foundItemInDialog = 0;
                        setComplete();
                        return true;
                    });
                }

                // depth-first search for the first Table in the control tree
                private Table findTable(Control[] children) {
                    for (Control child : children) {
                        if (child instanceof Table) {
                            return (Table) child;
                        } else if (child instanceof Composite) {
                            Table result = findTable(((Composite) child).getChildren());
                            if (result != null) {
                                return result;
                            }
                        }
                    }
                    return null;
                }
            };
            if(!PlatformUI.getWorkbench().getDisplay().isDisposed()) {
                // must be run in the UI thread
                PlatformUI.getWorkbench().getDisplay().syncExec(innerRunnable);
                waitForRunnable(innerRunnable);
                // propagate any failure recorded by the UI-thread runnable
                if(!innerRunnable.getFailure().equals("")) {
                    setFailure(innerRunnable.getFailure());
                }
            }
            setComplete();
        }
    };
    Thread chooserThread = new Thread(runnable);
    chooserThread.start();
    return runnable;
}
/**
 * Returns a started background runnable that toggles the button with the
 * given label inside the flat view of the open ElementSelectionDialog.
 * Records a failure on the runnable when no button with that label
 * exists.
 *
 * @param sleep        initial delay in milliseconds before acting
 * @param waitRunnable optional gate that must complete first
 * @param buttonName   exact label of the button to toggle
 */
public static FailableRunnable toggleButtonInElementSelectionDialog(final int sleep,
        final FailableRunnable waitRunnable, final String buttonName, Shell[] existingShells) {
    FailableRunnable runnable = new FailableRunnable() {
        @Override
        public void run() {
            sleep(sleep);
            waitForRunnable(waitRunnable);
            FailableRunnable innerRunnable = new FailableRunnable() {
                @Override
                public void run() {
                    processShell(existingShells, shell -> {
                        if (shell != null) {
                            ElementSelectionDialog dialog = (ElementSelectionDialog) shell.getData();
                            ElementSelectionFlatView view = dialog.getFlatView();
                            Control[] children = view.getChildren();
                            // scan the flat view for a button with the
                            // requested label
                            for (int i = 0; i < children.length; i++) {
                                if (children[i] instanceof Button) {
                                    Button button = (Button) children[i];
                                    if (button.getText().equals(buttonName)) {
                                        // flip the state and fire a selection
                                        // event so listeners react as if the
                                        // user clicked
                                        button.setSelection((button.getSelection()) ? false : true);
                                        button.notifyListeners(SWT.Selection, new Event());
                                        view.redraw();
                                        view.update();
                                        setComplete();
                                        // flush the UI event queue
                                        while (PlatformUI.getWorkbench().getDisplay().readAndDispatch())
                                            ;
                                        return true;
                                    }
                                }
                            }
                            // if we get here add an error to the thread, as
                            // the button could not be found
                            setFailure("Unable to locate button in selection dialog: " + buttonName);
                            setComplete();
                        }
                        return true;
                    });
                }
            };
            // must be run in the UI thread
            PlatformUI.getWorkbench().getDisplay().syncExec(innerRunnable);
            // propagate any failure recorded by the UI-thread runnable
            if(!innerRunnable.getFailure().equals("")) {
                setFailure(innerRunnable.getFailure());
            }
            setComplete();
        }
    };
    Thread chooserThread = new Thread(runnable);
    chooserThread.start();
    return runnable;
}
/**
 * Invokes the named method on the element inside an undoable test
 * transaction (see the four-argument overload below).
 */
public static void executeInTransaction(NonRootModelElement element, String method, Object[] parameters) {
    executeInTransaction(element, method, parameters, true);
}
/**
 * Reflectively invokes the named method on the given element inside a
 * model transaction, cancelling the transaction if anything fails and
 * dispatching pending events afterwards.
 *
 * @param element    receiver of the reflective call
 * @param method     name of a method on the element's class
 * @param parameters arguments; Integer/Boolean values are matched
 *                   against int/boolean parameter types
 * @param undoable   whether the transaction is started as undoable
 */
public static void executeInTransaction(NonRootModelElement element, String method, Object[] parameters, boolean undoable) {
    // build the parameter-type list for the reflective lookup, unboxing
    // Integer and Boolean to their primitive types
    // NOTE(review): a null parameter would NPE on getClass(), and other
    // boxed types (Long, Double, ...) stay boxed -- confirm callers
    // never pass them
    Class<?>[] paramClasses = new Class<?>[parameters.length];
    for(int i = 0; i < parameters.length; i++) {
        if(parameters[i] instanceof Integer) {
            paramClasses[i] = Integer.TYPE;
        } else if(parameters[i] instanceof Boolean) {
            paramClasses[i] = Boolean.TYPE;
        } else {
            paramClasses[i] = parameters[i].getClass();
        }
    }
    Transaction transaction = null;
    TransactionManager manager = TransactionManager.getSingleton();
    try {
        transaction = manager.startTransaction("test transaction",
            new ModelElement[] { Ooaofooa.getDefaultInstance(),
                Ooaofgraphics.getDefaultInstance() }, undoable);
        Method m = element.getClass().getMethod(method, paramClasses);
        m.invoke(element, parameters);
        manager.endTransaction(transaction);
    } catch (Exception e) {
        // roll back whatever part of the transaction was started
        if(transaction != null) {
            manager.cancelTransaction(transaction, e);
        }
        CorePlugin.logError("Unable to complete transaction.", e);
    }
    // let model-change listeners run before the test continues
    BaseTest.dispatchEvents(200);
}
}
|
job #10525 clean up TestUtil file
|
src/org.xtuml.bp.test/src/org/xtuml/bp/test/TestUtil.java
|
job #10525 clean up TestUtil file
|
|
Java
|
apache-2.0
|
adbf7ffa0e116103a5060f6b67ceca6945b7a266
| 0
|
fossamagna/liquibase,jimmycd/liquibase,Datical/liquibase,jimmycd/liquibase,Datical/liquibase,fossamagna/liquibase,jimmycd/liquibase,liquibase/liquibase,mattbertolini/liquibase,Datical/liquibase,Datical/liquibase,liquibase/liquibase,fossamagna/liquibase,mattbertolini/liquibase,liquibase/liquibase,mattbertolini/liquibase,mattbertolini/liquibase,jimmycd/liquibase
|
package liquibase.diff.output.changelog.core;
import liquibase.change.Change;
import liquibase.change.core.AddUniqueConstraintChange;
import liquibase.database.Database;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.diff.output.DiffOutputControl;
import liquibase.diff.output.changelog.AbstractChangeGenerator;
import liquibase.diff.output.changelog.ChangeGeneratorChain;
import liquibase.diff.output.changelog.ChangeGeneratorFactory;
import liquibase.diff.output.changelog.MissingObjectChangeGenerator;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Generates {@link AddUniqueConstraintChange}s for unique constraints that
 * exist in the reference database but are missing from the comparison
 * database, together with any backing-index change needed first (Oracle).
 */
public class MissingUniqueConstraintChangeGenerator extends AbstractChangeGenerator implements MissingObjectChangeGenerator {

    @Override
    public int getPriority(Class<? extends DatabaseObject> objectType, Database database) {
        if (UniqueConstraint.class.isAssignableFrom(objectType)) {
            return PRIORITY_DEFAULT;
        }
        return PRIORITY_NONE;
    }

    @Override
    public Class<? extends DatabaseObject>[] runAfterTypes() {
        // the owning table and its columns must exist before the constraint
        return new Class[]{
                Table.class,
                Column.class
        };
    }

    @Override
    public Class<? extends DatabaseObject>[] runBeforeTypes() {
        return new Class[]{Index.class};
    }

    @Override
    public Change[] fixMissing(DatabaseObject missingObject, DiffOutputControl control, Database referenceDatabase, Database comparisonDatabase, ChangeGeneratorChain chain) {
        List<Change> returnList = new ArrayList<Change>();

        UniqueConstraint uc = (UniqueConstraint) missingObject;
        // a constraint detached from any table cannot be expressed as a change
        if (uc.getTable() == null) {
            return null;
        }

        AddUniqueConstraintChange change = createAddUniqueConstraintChange();
        change.setTableName(uc.getTable().getName());
        if ((uc.getBackingIndex() != null) && control.getIncludeTablespace()) {
            change.setTablespace(uc.getBackingIndex().getTablespace());
        }
        if (control.getIncludeCatalog()) {
            change.setCatalogName(uc.getTable().getSchema().getCatalogName());
        }
        if (control.getIncludeSchema()) {
            change.setSchemaName(uc.getTable().getSchema().getName());
        }
        change.setConstraintName(uc.getName());
        change.setColumnNames(uc.getColumnNames());
        // null values keep default-valued attributes out of the changelog
        change.setDeferrable(uc.isDeferrable() ? Boolean.TRUE : null);
        change.setValidate(!uc.shouldValidate() ? Boolean.FALSE : null);
        change.setInitiallyDeferred(uc.isInitiallyDeferred() ? Boolean.TRUE : null);
        change.setDisabled(uc.isDisabled() ? Boolean.TRUE : null);

        if (referenceDatabase instanceof MSSQLDatabase) {
            change.setClustered(uc.isClustered() ? Boolean.TRUE : null);
        }

        if (comparisonDatabase instanceof OracleDatabase) {
            // Oracle constraints may reference a named backing index; when
            // that index does not already exist on the table, generate it
            // first and reference it from the constraint change
            Index oracleBackingIndex = uc.getBackingIndex();
            if ((oracleBackingIndex != null) && (oracleBackingIndex.getName() != null)
                    && !indexMatchesExisting(uc, oracleBackingIndex)) {
                Change[] changes = ChangeGeneratorFactory.getInstance().fixMissing(oracleBackingIndex, control, referenceDatabase, comparisonDatabase);
                if (changes != null) {
                    returnList.addAll(Arrays.asList(changes));

                    change.setForIndexName(oracleBackingIndex.getName());
                    Schema schema = oracleBackingIndex.getSchema();
                    if (schema != null) {
                        if (control.getIncludeCatalog()) {
                            change.setForIndexCatalogName(schema.getCatalogName());
                        }
                        if (control.getIncludeSchema()) {
                            change.setForIndexSchemaName(schema.getName());
                        }
                    }
                }
            }
        }

        // mark the backing index as handled so no duplicate index change
        // is generated for it later in the diff
        control.setAlreadyHandledMissing(uc.getBackingIndex());

        returnList.add(change);
        return returnList.toArray(new Change[0]);
    }

    /**
     * Returns true when the table already owns an index with the same
     * name as the constraint's backing index.
     */
    private boolean indexMatchesExisting(UniqueConstraint uc, Index backingIndex) {
        Table table = (Table) uc.getTable();
        for (Index index : table.getIndexes()) {
            // compare from the backing index's name (checked non-null by
            // the caller) so an unnamed existing index cannot NPE here
            if (backingIndex.getName().equals(index.getName())) {
                return true;
            }
        }
        return false;
    }

    protected AddUniqueConstraintChange createAddUniqueConstraintChange() {
        return new AddUniqueConstraintChange();
    }
}
|
liquibase-core/src/main/java/liquibase/diff/output/changelog/core/MissingUniqueConstraintChangeGenerator.java
|
package liquibase.diff.output.changelog.core;
import liquibase.change.Change;
import liquibase.change.core.AddUniqueConstraintChange;
import liquibase.database.Database;
import liquibase.database.core.MSSQLDatabase;
import liquibase.database.core.OracleDatabase;
import liquibase.diff.output.DiffOutputControl;
import liquibase.diff.output.changelog.AbstractChangeGenerator;
import liquibase.diff.output.changelog.ChangeGeneratorChain;
import liquibase.diff.output.changelog.ChangeGeneratorFactory;
import liquibase.diff.output.changelog.MissingObjectChangeGenerator;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
 * Generates AddUniqueConstraintChange objects for unique constraints
 * present in the reference database but missing from the comparison
 * database, together with any backing-index change needed first (Oracle).
 */
public class MissingUniqueConstraintChangeGenerator extends AbstractChangeGenerator implements MissingObjectChangeGenerator {
    @Override
    public int getPriority(Class<? extends DatabaseObject> objectType, Database database) {
        if (UniqueConstraint.class.isAssignableFrom(objectType)) {
            return PRIORITY_DEFAULT;
        }
        return PRIORITY_NONE;
    }

    @Override
    public Class<? extends DatabaseObject>[] runAfterTypes() {
        // the owning table and its columns must exist before the constraint
        return new Class[]{
                Table.class,
                Column.class
        };
    }

    @Override
    public Class<? extends DatabaseObject>[] runBeforeTypes() {
        return new Class[]{Index.class};
    }

    @Override
    public Change[] fixMissing(DatabaseObject missingObject, DiffOutputControl control, Database referenceDatabase, Database comparisonDatabase, ChangeGeneratorChain chain) {
        List<Change> returnList = new ArrayList<Change>();

        UniqueConstraint uc = (UniqueConstraint) missingObject;
        // a constraint detached from any table cannot be expressed as a change
        if (uc.getTable() == null) {
            return null;
        }

        AddUniqueConstraintChange change = createAddUniqueConstraintChange();
        change.setTableName(uc.getTable().getName());
        if (uc.getBackingIndex() != null && control.getIncludeTablespace()) {
            change.setTablespace(uc.getBackingIndex().getTablespace());
        }
        if (control.getIncludeCatalog()) {
            change.setCatalogName(uc.getTable().getSchema().getCatalogName());
        }
        if (control.getIncludeSchema()) {
            change.setSchemaName(uc.getTable().getSchema().getName());
        }
        change.setConstraintName(uc.getName());
        change.setColumnNames(uc.getColumnNames());
        // null values keep default-valued attributes out of the changelog
        change.setDeferrable(uc.isDeferrable() ? Boolean.TRUE : null);
        change.setValidate(!uc.shouldValidate() ? Boolean.FALSE : null);
        change.setInitiallyDeferred(uc.isInitiallyDeferred() ? Boolean.TRUE : null);
        change.setDisabled(uc.isDisabled() ? Boolean.TRUE : null);

        if (referenceDatabase instanceof MSSQLDatabase) {
            change.setClustered(uc.isClustered() ? Boolean.TRUE : null);
        }

        if (comparisonDatabase instanceof OracleDatabase) {
            // Oracle constraints may reference a named backing index; when
            // that index does not already exist on the table, generate it
            // first and reference it from the constraint change
            Index backingIndex = uc.getBackingIndex();
            if (backingIndex != null && backingIndex.getName() != null) {
                boolean found = indexMatchesExisting(uc, backingIndex);
                if (! found) {
                    Change[] changes = ChangeGeneratorFactory.getInstance().fixMissing(backingIndex, control, referenceDatabase, comparisonDatabase);
                    if (changes != null) {
                        returnList.addAll(Arrays.asList(changes));

                        change.setForIndexName(backingIndex.getName());
                        Schema schema = backingIndex.getSchema();
                        if (schema != null) {
                            if (control.getIncludeCatalog()) {
                                change.setForIndexCatalogName(schema.getCatalogName());
                            }
                            if (control.getIncludeSchema()) {
                                change.setForIndexSchemaName(schema.getName());
                            }
                        }
                    }
                }
            }
        }

        // mark the backing index as handled so no duplicate index change
        // is generated for it later in the diff
        Index backingIndex = uc.getBackingIndex();
//        if (backingIndex == null) {
//            Index exampleIndex = new Index().setTable(uc.getTable());
//            for (String col : uc.getColumns()) {
//                exampleIndex.getColumns().add(col);
//            }
//            control.setAlreadyHandledMissing(exampleIndex);
//        } else {
            control.setAlreadyHandledMissing(backingIndex);
//        }

        returnList.add(change);
        return returnList.toArray(new Change[returnList.size()]);
    }

    /**
     * Returns true when the table already owns an index with the same
     * name as the constraint's backing index.
     */
    public boolean indexMatchesExisting(UniqueConstraint uc, Index backingIndex) {
        boolean found = false;
        Table table = (Table)uc.getTable();
        List<Index> indexList = table.getIndexes();
        for (Index index : indexList) {
            if (index.getName().equals(backingIndex.getName())) {
                found = true;
            }
        }
        return found;
    }

    protected AddUniqueConstraintChange createAddUniqueConstraintChange() {
        return new AddUniqueConstraintChange();
    }
}
|
DAT-1926 Make method private
|
liquibase-core/src/main/java/liquibase/diff/output/changelog/core/MissingUniqueConstraintChangeGenerator.java
|
DAT-1926 Make method private
|
|
Java
|
apache-2.0
|
d037a74d303d969c7a4fc3f3db241608e721e76f
| 0
|
isandlaTech/cohorte-devtools,isandlaTech/cohorte-devtools,isandlaTech/cohorte-devtools,isandlaTech/cohorte-devtools,isandlaTech/cohorte-devtools
|
package org.cohorte.eclipse.felix.config.generator;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.psem2m.utilities.CXException;
import org.psem2m.utilities.files.CXFile;
import org.psem2m.utilities.files.CXFileDir;
import org.psem2m.utilities.files.CXFileUtf8;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* class that read a launch configuration to generator a config.properties that
* is use by felix to launch framework
*
* @author apisu
*
*/
@Mojo(name = "generate-config", defaultPhase = LifecyclePhase.COMPILE)
public class ConfigGenerator extends AbstractMojo {
// the Maven project this mojo runs in (injected by Maven)
@Parameter(defaultValue = "${project}", required = true, readonly = true)
MavenProject project;

// base new config file on this config file
@Parameter(property = "base.felix.config.file.path")
String sourceConfigFile;

// Eclipse .launch configuration used as the source of the runtime setup
@Parameter(property = "launch.eclipse.file.path")
String launchEclipseFile;

// path of the felix jar referenced by the launch shell
// NOTE(review): not used in the methods visible here; confirm consumer
@Parameter(property = "shell.felix.jar.file.path")
String shellFelixJarFilePath;

// path of the felix config file used by the launch shell
@Parameter(property = "shell.felix.config.file.path")
String shellFelixConfigFilePath;

// root directory of the felix bundle cache
@Parameter(property = "felix.cache.rootdir")
String felixCacheRootDir;

// VM arguments that override those found in the .launch file
// (see getVMParameter())
@Parameter(property = "shell.vmarguments")
String overrideShellArgument;

// file whose contents, when present, replace overrideShellArgument
// (see getVMParameter())
@Parameter(property = "shell.vmarguments.file.path")
String overrideShellArgumentFilePath;

// target path of the generated launch shell file (per the property name)
@Parameter(property = "target.shell.file.path")
String targetLaunchJvmFile;

// target path of the generated config file (per the property name)
@Parameter(property = "target.config.file.path")
String pathTargerConfigFile;

// can express multiple folder with ";" separator. the property express also
// pair of path in local disk and path in target disk for config
// eg cohorte-home/repo:opt/cohorte/repo; another path
@Parameter(property = "bundle.jar.directories")
String pPathBundleTarget;

// accumulated properties for the generated configuration
private final Properties pProperties = new Properties();

// bundle symbolic name -> absolute jar path, populated by analyseDir()
private final Map<String, String> pMapSymbolicNameToJarPath = new HashMap<>();
/**
 * Returns the bundle symbolic name declared in the jar's manifest, or
 * null when the jar has no MANIFEST.MF entry or no Bundle-SymbolicName
 * header (i.e. it is not an OSGi bundle).
 *
 * @param aFilePath absolute path of the jar file to inspect
 * @return the raw Bundle-SymbolicName value (directive suffixes such as
 *         ";singleton:=true" are NOT stripped here; see analyseDir)
 * @throws IOException if the jar cannot be opened or read
 */
private String getBundleSymbolicNameFromJar(String aFilePath) throws IOException {
    getLog().debug(String.format("getBundleSymbolicNameFromJar file =[%s]", aFilePath));
    String wSymbolicName = null;
    // try-with-resources closes the zip (and its entry streams) even
    // when reading fails; the original also left the reader unclosed
    try (ZipFile wZipFile = new ZipFile(aFilePath)) {
        final Enumeration<? extends ZipEntry> wEntries = wZipFile.entries();
        while (wEntries.hasMoreElements()) {
            final ZipEntry wEntry = wEntries.nextElement();
            // NOTE(review): contains() also matches manifests outside
            // META-INF; kept as-is to preserve existing behavior
            if (wEntry.getName().contains("MANIFEST.MF")) {
                final String wContent;
                try (BufferedReader wReader = new BufferedReader(
                        new InputStreamReader(wZipFile.getInputStream(wEntry)))) {
                    wContent = wReader.lines().collect(Collectors.joining("\n"));
                }
                final String[] wLines = wContent.split("\n");
                // stop scanning as soon as the header is found
                for (int i = 0; i < wLines.length && wSymbolicName == null; i++) {
                    final String wLine = wLines[i];
                    // NOTE(review): a manifest value wrapped across lines
                    // (72-byte manifest line limit) would be truncated here
                    if (wLine.contains("Bundle-SymbolicName: ")) {
                        wSymbolicName = wLine.replace("Bundle-SymbolicName: ", "");
                        getLog().debug(String.format("symbolic name=[%s] file =[%s]", wSymbolicName, aFilePath));
                    }
                }
            }
        }
        return wSymbolicName;
    }
}
/**
 * list bundle jar file in this directory with symbolic name
 *
 * Recursively walks aDir; every jar that declares a Bundle-SymbolicName
 * is registered in pMapSymbolicNameToJarPath (symbolic name -> absolute
 * jar path). Any directive suffix after ';' in the symbolic name (e.g.
 * ";singleton:=true") is stripped before registering.
 *
 * @param aDir the directory to scan recursively
 * @throws IOException if a jar cannot be read
 */
private void analyseDir(CXFileDir aDir) throws IOException {
    getLog().debug(String.format("analyseDir =[%s]", aDir.getAbsolutePath()));
    for (final String wFile : aDir.list()) {
        getLog().debug(String.format("dir=[%s] file =[%s]", aDir.getAbsolutePath(), wFile));
        if (wFile.endsWith(".jar")) {
            // check if it's a bundle
            final String wFullFilePath = aDir.getAbsolutePath() + File.separatorChar + wFile;
            String wSymbolicBundleName = getBundleSymbolicNameFromJar(wFullFilePath);
            if (wSymbolicBundleName != null) {
                // keep only the name itself, dropping OSGi directives
                if (wSymbolicBundleName.contains(";")) {
                    wSymbolicBundleName = wSymbolicBundleName.split(";")[0];
                }
                getLog().debug(String.format("===>symbolicName=[%s] \n, path jar=[%s]", wSymbolicBundleName,
                    wFullFilePath));
                pMapSymbolicNameToJarPath.put(wSymbolicBundleName, wFullFilePath);
            }
        } else {
            // recurse into sub-directories
            final CXFileDir wSubDir = new CXFileDir(aDir, wFile);
            if (wSubDir.exists() && wSubDir.isDirectory()) {
                analyseDir(wSubDir);
            }
        }
    }
}
/**
 * Scans the configured bundle directories and records which jars are
 * OSGi bundles.
 *
 * The parameter is a list of "localPath,targetPath" pairs separated by
 * ';' (whitespace, tabs and newlines are stripped first). Each existing
 * local directory is scanned recursively via analyseDir() and mapped to
 * its target path in the returned map.
 *
 * @param aPathBundleTarget raw "local,target[;local,target...]" string
 * @return map of absolute local directory path -> target directory path
 * @throws MojoExecutionException when scanning fails
 */
private Map<String, String> analyseDirectory(String aPathBundleTarget) throws MojoExecutionException {
    final Map<String, String> wDirsBundleLocation = new HashMap<>();
    try {
        final String wPathBundle = aPathBundleTarget.replaceAll("\n", "").replaceAll("\t", "").replaceAll(" ", "");
        getLog().info(String.format("analyseDirectory dir=[%s]!", wPathBundle));
        // String.split returns a single-element array when ';' is absent,
        // so one loop handles both the single-pair and multi-pair forms
        // (the original duplicated this whole body in an if/else)
        for (final String wPathPair : wPathBundle.split(";")) {
            if (wPathPair.contains(",")) {
                final String wPathLocalDir = wPathPair.split(",")[0];
                final String wPathTargetDir = wPathPair.split(",")[1];
                final CXFileDir wDir = new CXFileDir(wPathLocalDir);
                // silently skip entries whose local directory is missing
                if (wDir.isDirectory() && wDir.exists()) {
                    getLog().info(String.format("add dir local=[%s]!", wPathLocalDir));
                    getLog().info(String.format("add dir target=[%s]!", wPathTargetDir));
                    analyseDir(wDir);
                    wDirsBundleLocation.put(wDir.getAbsolutePath(), wPathTargetDir);
                }
            }
        }
        return wDirsBundleLocation;
    } catch (final Exception e) {
        // log and wrap with the argument actually being processed (the
        // original logged the pPathBundleTarget field here instead)
        getLog().error(String.format("fail to analyse directory %s error=[%s]!", aPathBundleTarget,
            CXException.eInString(e)));
        throw new MojoExecutionException(String.format("fail to analyse directory %s error=[%s]!",
            aPathBundleTarget, CXException.eInString(e)));
    }
}
/**
 * Parses the eclipse ".launch" configuration file into a DOM document.
 *
 * @param aLaunchConfigFile path of the launch configuration XML file
 * @return the parsed document
 * @throws ParserConfigurationException if the parser cannot be configured
 * @throws SAXException if the file is not well-formed XML
 * @throws IOException if the file cannot be read
 */
private Document getDocumentFromLauncherFile(String aLaunchConfigFile)
        throws ParserConfigurationException, SAXException, IOException {
    final DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    // security: a .launch file is externally supplied XML — forbid DOCTYPE
    // declarations and entity expansion to prevent XXE attacks
    dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    dbFactory.setExpandEntityReferences(false);
    final DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    return dBuilder.parse(aLaunchConfigFile);
}
/**
 * Extracts the VM arguments from the launch configuration and turns them into
 * a shell-friendly, line-continued ("\") argument string. Individual
 * "-Dkey=value" arguments can be overridden through the "shell.vmarguments"
 * parameter or the "shell.vmarguments.file.path" file (one argument per line;
 * the file takes precedence when it exists).
 *
 * @param wDocLauncherEclipse parsed launch configuration
 * @return VM arguments formatted for the generated shell script ("" if none)
 */
private String getVMParameter(Document wDocLauncherEclipse)
        throws SAXException, IOException, ParserConfigurationException {
    String wVmArgument = null;
    final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute");
    // find the stringAttribute that carries the VM arguments
    for (int x = 0; x < wNodeList.getLength() && wVmArgument == null; x++) {
        final Node wNode = wNodeList.item(x);
        if (wNode instanceof Element) {
            final Element wElement = (Element) wNode;
            if ("org.eclipse.jdt.launching.VM_ARGUMENTS".equals(wElement.getAttribute("key"))) {
                wVmArgument = wElement.getAttribute("value");
            }
        }
    }
    if (wVmArgument == null) {
        // fix: the original dereferenced wVmArgument unconditionally and threw a
        // NullPointerException when the launch file declared no VM_ARGUMENTS
        getLog().warn("no VM_ARGUMENTS attribute found in launch configuration");
        return "";
    }
    final Map<String, String> wMapOverrideArgument = new HashMap<>();
    if (overrideShellArgumentFilePath != null) {
        getLog().debug(String.format("file override argument vm %s", overrideShellArgumentFilePath));
        final CXFileUtf8 wFileContentOverrideVmArg = new CXFileUtf8(overrideShellArgumentFilePath);
        if (wFileContentOverrideVmArg.exists()) {
            // the file content replaces any inline override value
            overrideShellArgument = wFileContentOverrideVmArg.readAll();
            getLog().debug(String.format("content file override argument vm %s", overrideShellArgument));
        }
    } else {
        getLog().debug(String.format("no file override argument vm %s", overrideShellArgumentFilePath));
    }
    if (overrideShellArgument != null) {
        // build the map of key -> override value; keys are stripped of whitespace
        // and of their "-D" prefix so they match the launch-file keys
        for (final String wOverrideArg : overrideShellArgument.split("\n")) {
            if (wOverrideArg.contains("=")) {
                final String[] wSplit = wOverrideArg.split("=");
                String wArgumentKey = wSplit[0];
                getLog().debug(String.format("argument override key %s", wArgumentKey));
                wArgumentKey = wArgumentKey.replaceAll(" ", "").replaceAll("\t", "");
                if (wArgumentKey.startsWith("-D")) {
                    wArgumentKey = wArgumentKey.substring(2);
                }
                wMapOverrideArgument.put(wArgumentKey, wSplit.length > 1 ? wSplit[1] : "");
            }
        }
    } else {
        getLog().debug("no override argument");
    }
    // fix: use StringBuilder instead of repeated String concatenation in the loop
    final StringBuilder wVmArgUsableInShell = new StringBuilder();
    for (final String wLine : wVmArgument.split("\n")) {
        if (wLine.trim().isEmpty()) {
            continue;
        }
        String wArgumentKey = null;
        if (wLine.contains("=")) {
            wArgumentKey = wLine.split("=")[0];
            if (wArgumentKey.startsWith("-D")) {
                wArgumentKey = wArgumentKey.substring(2);
            }
        }
        if (wArgumentKey != null) {
            getLog().debug(String.format("argument key %s", wArgumentKey));
        }
        if (wArgumentKey != null && wMapOverrideArgument.containsKey(wArgumentKey)) {
            // the eclipse value is replaced by the maven-supplied override
            wVmArgUsableInShell.append("\t-D").append(wArgumentKey).append('=')
                    .append(wMapOverrideArgument.get(wArgumentKey)).append(" \\\n");
        } else {
            wVmArgUsableInShell.append('\t').append(wLine).append(" \\\n");
        }
    }
    getLog().debug(String.format("vm arguments [%s]", wVmArgUsableInShell));
    return wVmArgUsableInShell.toString();
}
/**
 * Collects the symbolic names of the bundles selected in the launch
 * configuration: the "workspace_bundles" and "target_bundles" attributes.
 * Each entry looks like "symbolic.name@startLevel"; only the part before
 * '@' is kept.
 *
 * @param wDocLauncherEclipse parsed launch configuration
 * @return symbolic names of all bundles to add to the felix config
 */
private List<String> getListSymbolicBundleNameToAdd(Document wDocLauncherEclipse)
        throws SAXException, IOException, ParserConfigurationException {
    final List<String> wLisSymbolicBundleName = new ArrayList<>();
    final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute");
    for (int x = 0; x < wNodeList.getLength(); x++) {
        final Node wNode = wNodeList.item(x);
        if (wNode instanceof Element) {
            final Element wElement = (Element) wNode;
            final String wKey = wElement.getAttribute("key");
            // both attributes are processed identically (the original duplicated the
            // branch); note getAttribute never returns null per the DOM spec, so the
            // original's null check was redundant
            if ("workspace_bundles".equals(wKey) || "target_bundles".equals(wKey)) {
                for (final String wBundleName : wElement.getAttribute("value").split(",")) {
                    if (wBundleName.contains("@")) {
                        wLisSymbolicBundleName.add(wBundleName.split("@")[0]);
                    }
                }
            }
        }
    }
    getLog().debug(String.format("symbolic name bundle to add [%s]", wLisSymbolicBundleName));
    return wLisSymbolicBundleName;
}
/**
 * Builds the shell script that launches the felix framework with the VM
 * arguments extracted from the launch configuration, and writes it to
 * "target.shell.file.path" when that parameter is set.
 *
 * @param aLauncherEclipseDom parsed launch configuration
 */
private void createJvmShell(Document aLauncherEclipseDom)
        throws MojoExecutionException, IOException, SAXException, ParserConfigurationException {
    final String wShellFormat = "#!/bin/sh\njava %s -Dfelix.config.properties=file:/%s -Dfile.encoding=UTF-8 -jar %s bundle-cache -consoleLog -console";
    final String wShell = String.format(wShellFormat, getVMParameter(aLauncherEclipseDom),
            shellFelixConfigFilePath, shellFelixJarFilePath);
    getLog().info(String.format("shell launch jvm =[%s]", wShell));
    if (targetLaunchJvmFile == null) {
        return;
    }
    final CXFileUtf8 wShellFile = new CXFileUtf8(targetLaunchJvmFile);
    wShellFile.getParentDirectory().mkdirs();
    wShellFile.writeAll(wShell);
}
/**
 * Generates the felix config file: loads base properties (from an http(s)
 * url, a local file, or built-in defaults), appends every bundle selected in
 * the launch configuration to "felix.auto.start.4", rewrites local directory
 * paths to their target location, and writes the result to
 * "target.config.file.path".
 *
 * @param aFileBaseConfifPath base config file path or url (may be null)
 * @param aLauncherEclipseDom parsed launch configuration
 * @throws MojoExecutionException if a selected bundle cannot be located
 */
private void createConfigFelixFile(String aFileBaseConfifPath, Document aLauncherEclipseDom)
        throws MojoExecutionException, IOException, SAXException, ParserConfigurationException {
    final Map<String, String> wDirsBundleLocation = analyseDirectory(pPathBundleTarget);
    if (aFileBaseConfifPath != null) {
        // load base properties from an url or a local file
        if (aFileBaseConfifPath.startsWith("http://") || aFileBaseConfifPath.startsWith("https://")) {
            final HttpGet wGet = new HttpGet(aFileBaseConfifPath);
            // fix: close the http client (was leaked)
            try (CloseableHttpClient wClient = HttpClientBuilder.create().build()) {
                final HttpResponse wResponse = wClient.execute(wGet);
                pProperties.load(wResponse.getEntity().getContent());
            }
        } else {
            final CXFile wFileBaseProperty = new CXFile(aFileBaseConfifPath);
            if (wFileBaseProperty.exists()) {
                pProperties.load(wFileBaseProperty.getInputStream());
            }
        }
    } else {
        // no base config: seed sensible felix defaults
        pProperties.put("org.osgi.framework.storage.clean", "none");
        pProperties.put("org.osgi.framework.storage", "bundle-cache");
        pProperties.put("org.osgi.framework.startlevel.beginning", "4");
        pProperties.put("felix.cache.rootdir",
                felixCacheRootDir != null ? felixCacheRootDir : "/opt/node/felix/rootdir");
    }
    final List<String> wListSymbolicBundleName = getListSymbolicBundleNameToAdd(aLauncherEclipseDom);
    // names still in this list at the end could not be resolved to a jar
    final List<String> wTreatedSymbolicNames = new ArrayList<>(wListSymbolicBundleName);
    String wListBundles = pProperties.getProperty("felix.auto.start.4");
    if (wListBundles == null) {
        wListBundles = "";
    }
    final StringBuilder wBundles = new StringBuilder(wListBundles);
    for (final String wSymbolicBundleToAdd : wListSymbolicBundleName) {
        if (pMapSymbolicNameToJarPath.containsKey(wSymbolicBundleToAdd)) {
            if (wBundles.length() > 0) {
                wBundles.append(" \\\n");
            }
            // NOTE(review): "file:\" is an unusual url prefix — "file:/" is the common
            // form; kept as-is because downstream consumers may depend on it. Confirm.
            final String wAddBundle = "file:\\" + pMapSymbolicNameToJarPath.get(wSymbolicBundleToAdd);
            getLog().info(String.format("add bundle=[%s]!", wAddBundle));
            wBundles.append(wAddBundle);
            wTreatedSymbolicNames.remove(wSymbolicBundleToAdd);
        }
    }
    if (!wTreatedSymbolicNames.isEmpty()) {
        getLog().error("symbolicName no treated " + wTreatedSymbolicNames);
        throw new MojoExecutionException("symbolicName no treated " + wTreatedSymbolicNames);
    }
    wListBundles = wBundles.toString();
    // replace every local directory by its target directory
    // fix: use the literal String.replace — replaceAll interprets the path as a
    // regex and the replacement as a pattern, which breaks on backslashes, dots
    // and other regex meta-characters in file paths
    for (final Map.Entry<String, String> wEntry : wDirsBundleLocation.entrySet()) {
        wListBundles = wListBundles.replace(wEntry.getKey(), wEntry.getValue());
    }
    getLog().info(String.format("felix.auto.start.4=[%s]!", wListBundles));
    pProperties.put("felix.auto.start.4", wListBundles);
    getLog().debug(String.format("properties file content=[%s]", pProperties.toString()));
    if (pathTargerConfigFile != null) {
        final CXFileUtf8 wFileTargetConfig = new CXFileUtf8(pathTargerConfigFile);
        wFileTargetConfig.getParentDirectory().mkdirs();
        wFileTargetConfig.openWrite();
        try {
            for (final Object wKey : pProperties.keySet()) {
                wFileTargetConfig.write(wKey.toString() + "=" + pProperties.getProperty(wKey.toString()) + "\n");
            }
        } finally {
            // fix: close the file even when a write fails (was leaked on exception)
            wFileTargetConfig.close();
        }
    }
}
/**
 * Mojo entry point: validates the "launch.eclipse.file.path" and
 * "bundle.jar.directories" parameters, parses the eclipse launch
 * configuration, then generates the felix config file and the launch shell.
 * Missing parameters are logged and skipped; a missing launch file or a
 * generation failure aborts the build.
 *
 * @throws MojoExecutionException if the launch file does not exist or any
 *         generation step fails
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    getLog().info("execute ");
    if (launchEclipseFile != null) {
        final File wFileLaunchConfig = new File(launchEclipseFile);
        if (pPathBundleTarget != null) {
            if (!wFileLaunchConfig.exists()) {
                getLog().error(String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath()));
                throw new MojoExecutionException(
                        String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath()));
            } else {
                try {
                    getLog().info(String.format("launch file=[%s]!", wFileLaunchConfig.getAbsolutePath()));
                    getLog().info(String.format("path directories jar=[%s]!", pPathBundleTarget));
                    // parse the .launch XML once, then produce both artifacts from it
                    final Document wLauncherEclipseDom = getDocumentFromLauncherFile(launchEclipseFile);
                    createConfigFelixFile(sourceConfigFile, wLauncherEclipseDom);
                    createJvmShell(wLauncherEclipseDom);
                } catch (final Exception e) {
                    getLog().error(String.format("fail to parse xml file %s error=[%s]!",
                            wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e)));
                    throw new MojoExecutionException(String.format("fail to parse xml file %s error=[%s]!",
                            wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e)));
                }
            }
        } else {
            // nothing to do without the bundle directories parameter
            getLog().info(String.format("no path bundle directory !"));
        }
    } else {
        // nothing to do without a launch configuration file
        getLog().info(String.format("no launch file %s!", launchEclipseFile));
    }
}
}
|
org.cohorte.eclipse.felix.config.generator/src/org/cohorte/eclipse/felix/config/generator/ConfigGenerator.java
|
package org.cohorte.eclipse.felix.config.generator;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.stream.Collectors;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.project.MavenProject;
import org.psem2m.utilities.CXException;
import org.psem2m.utilities.files.CXFile;
import org.psem2m.utilities.files.CXFileDir;
import org.psem2m.utilities.files.CXFileUtf8;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
 * Reads an eclipse launch configuration to generate the config.properties
 * file that is used by felix to launch the framework.
 *
 * @author apisu
 *
 */
@Mojo(name = "generate-config", defaultPhase = LifecyclePhase.COMPILE)
public class ConfigGenerator extends AbstractMojo {
// the maven project this mojo runs in
@Parameter(defaultValue = "${project}", required = true, readonly = true)
MavenProject project;
// base the generated config file on this config file (local path or http(s) url)
@Parameter(property = "base.felix.config.file.path")
String sourceConfigFile;
// eclipse .launch file that lists the bundles and VM arguments
@Parameter(property = "launch.eclipse.file.path")
String launchEclipseFile;
// path of the felix jar referenced by the generated launch shell
@Parameter(property = "shell.felix.jar.file.path")
String shellFelixJarFilePath;
// path of the felix config file referenced by the generated launch shell
@Parameter(property = "shell.felix.config.file.path")
String shellFelixConfigFilePath;
// value for the felix.cache.rootdir property (defaults to /opt/node/felix/rootdir)
@Parameter(property = "felix.cache.rootdir")
String felixCacheRootDir;
// inline VM-argument overrides, one "-Dkey=value" per line
@Parameter(property = "shell.vmarguments")
String overrideShellArgument;
// file containing VM-argument overrides (takes precedence when it exists)
@Parameter(property = "shell.vmarguments.file.path")
String overrideShellArgumentFilePath;
// where to write the generated launch shell script
@Parameter(property = "target.shell.file.path")
String targetLaunchJvmFile;
// where to write the generated felix config file
@Parameter(property = "target.config.file.path")
String pathTargerConfigFile;
// ";"-separated list of "localDir=targetDir" pairs: localDir is scanned for
// bundle jars and rewritten to targetDir in the generated config file
// (the original example "cohorte-home/repo:opt/cohorte/repo" used ":",
// which does not match the separator the code parses — confirm)
@Parameter(property = "bundle.jar.directories")
String pPathBundleTarget;
// properties accumulated for the generated config file
private final Properties pProperties = new Properties();
// bundle symbolic name -> absolute jar path, filled by analyseDir
private final Map<String, String> pMapSymbolicNameToJarPath = new HashMap<>();
/**
 * Returns the Bundle-SymbolicName declared in the jar's MANIFEST.MF, or null
 * when the jar is not an OSGi bundle.
 *
 * @param aFilePath absolute path of the jar file
 * @return the symbolic name, or null if no Bundle-SymbolicName header is found
 * @throws IOException if the jar cannot be read
 */
private String getBundleSymbolicNameFromJar(String aFilePath) throws IOException {
    getLog().debug(String.format("getBundleSymbolicNameFromJar file =[%s]", aFilePath));
    String wSymbolicName = null;
    // fix: try-with-resources guarantees the zip and the entry stream are closed
    try (ZipFile wZipFile = new ZipFile(aFilePath)) {
        final Enumeration<? extends ZipEntry> wEntries = wZipFile.entries();
        while (wEntries.hasMoreElements() && wSymbolicName == null) {
            final ZipEntry wEntry = wEntries.nextElement();
            if (wEntry.getName().contains("MANIFEST.MF")) {
                final String wContent;
                try (InputStream wStream = wZipFile.getInputStream(wEntry);
                        BufferedReader wReader = new BufferedReader(new InputStreamReader(wStream))) {
                    wContent = wReader.lines().collect(Collectors.joining("\n"));
                }
                // NOTE(review): manifest headers longer than 72 bytes are wrapped onto
                // continuation lines, which this line-based parse would truncate —
                // confirm whether any bundle names are long enough for that to matter
                final String[] wLines = wContent.split("\n");
                for (int i = 0; i < wLines.length && wSymbolicName == null; i++) {
                    final String wLine = wLines[i];
                    if (wLine.contains("Bundle-SymbolicName: ")) {
                        wSymbolicName = wLine.replace("Bundle-SymbolicName: ", "");
                        getLog().debug(String.format("symbolic name=[%s] file =[%s]", wSymbolicName, aFilePath));
                    }
                }
            }
        }
        return wSymbolicName;
    }
}
/**
 * Recursively scans a directory: every ".jar" file that is an OSGi bundle is
 * registered in pMapSymbolicNameToJarPath (symbolic name -> absolute jar
 * path); every existing sub-directory is scanned in turn.
 *
 * @param aDir the directory to scan
 * @throws IOException if a jar file cannot be read
 */
private void analyseDir(CXFileDir aDir) throws IOException {
    getLog().debug(String.format("analyseDir =[%s]", aDir.getAbsolutePath()));
    for (final String wFile : aDir.list()) {
        getLog().debug(String.format("dir=[%s] file =[%s]", aDir.getAbsolutePath(), wFile));
        if (wFile.endsWith(".jar")) {
            // check if it's a bundle: only jars with a Bundle-SymbolicName are kept
            final String wFullFilePath = aDir.getAbsolutePath() + File.separatorChar + wFile;
            String wSymbolicBundleName = getBundleSymbolicNameFromJar(wFullFilePath);
            if (wSymbolicBundleName != null) {
                // the header may carry directives (e.g. ";singleton:=true"): keep only the name
                if (wSymbolicBundleName.contains(";")) {
                    wSymbolicBundleName = wSymbolicBundleName.split(";")[0];
                }
                getLog().debug(String.format("===>symbolicName=[%s] \n, path jar=[%s]", wSymbolicBundleName,
                        wFullFilePath));
                pMapSymbolicNameToJarPath.put(wSymbolicBundleName, wFullFilePath);
            }
        } else {
            // not a jar: recurse when the entry is an existing sub-directory
            final CXFileDir wSubDir = new CXFileDir(aDir, wFile);
            if (wSubDir.exists() && wSubDir.isDirectory()) {
                analyseDir(wSubDir);
            }
        }
    }
}
/**
 * Parses the "bundle.jar.directories" parameter and scans each local directory
 * for bundle jars.
 *
 * The parameter is a ";"-separated list of "localDir=targetDir" pairs: each
 * local directory is scanned (via analyseDir) and mapped to the target
 * directory that will replace it in the generated config file.
 *
 * @param aPathBundleTarget raw parameter value (may span lines / contain whitespace)
 * @return map of absolute local directory path -> target directory path
 * @throws MojoExecutionException if scanning fails
 */
private Map<String, String> analyseDirectory(String aPathBundleTarget) throws MojoExecutionException {
    final Map<String, String> wDirsBundleLocation = new HashMap<>();
    try {
        // strip whitespace so the parameter can be spread over several lines in the pom
        final String wPathBundle = aPathBundleTarget.replaceAll("[\n\t ]", "");
        getLog().info(String.format("analyseDirectory dir=[%s]!", wPathBundle));
        // split() on a string without ";" yields a single element, so one loop covers
        // both the single-pair and multi-pair cases (the original duplicated this logic)
        for (final String wPathPair : wPathBundle.split(";")) {
            if (wPathPair.contains("=")) {
                final String[] wPair = wPathPair.split("=");
                final String wPathLocalDir = wPair[0];
                final String wPathTargetDir = wPair[1];
                final CXFileDir wDir = new CXFileDir(wPathLocalDir);
                if (wDir.isDirectory() && wDir.exists()) {
                    getLog().info(String.format("add dir local=[%s]!", wPathLocalDir));
                    getLog().info(String.format("add dir target=[%s]!", wPathTargetDir));
                    analyseDir(wDir);
                    wDirsBundleLocation.put(wDir.getAbsolutePath(), wPathTargetDir);
                }
            }
        }
        return wDirsBundleLocation;
    } catch (final Exception e) {
        // fix: report the method argument (was the field pPathBundleTarget in the log call)
        getLog().error(String.format("fail to analyse directory %s error=[%s]!", aPathBundleTarget,
                CXException.eInString(e)));
        throw new MojoExecutionException(String.format("fail to analyse directory %s error=[%s]!",
                aPathBundleTarget, CXException.eInString(e)));
    }
}
/**
 * Parses the eclipse ".launch" configuration file into a DOM document.
 *
 * @param aLaunchConfigFile path of the launch configuration XML file
 * @return the parsed document
 * @throws ParserConfigurationException if the parser cannot be configured
 * @throws SAXException if the file is not well-formed XML
 * @throws IOException if the file cannot be read
 */
private Document getDocumentFromLauncherFile(String aLaunchConfigFile)
        throws ParserConfigurationException, SAXException, IOException {
    final DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    // security: a .launch file is externally supplied XML — forbid DOCTYPE
    // declarations and entity expansion to prevent XXE attacks
    dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    dbFactory.setExpandEntityReferences(false);
    final DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    return dBuilder.parse(aLaunchConfigFile);
}
/**
 * Extracts the VM arguments from the launch configuration and turns them into
 * a shell-friendly, line-continued ("\") argument string. Individual
 * "-Dkey=value" arguments can be overridden through the "shell.vmarguments"
 * parameter or the "shell.vmarguments.file.path" file (one argument per line;
 * the file takes precedence when it exists).
 *
 * @param wDocLauncherEclipse parsed launch configuration
 * @return VM arguments formatted for the generated shell script ("" if none)
 */
private String getVMParameter(Document wDocLauncherEclipse)
        throws SAXException, IOException, ParserConfigurationException {
    String wVmArgument = null;
    final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute");
    // find the stringAttribute that carries the VM arguments
    for (int x = 0; x < wNodeList.getLength() && wVmArgument == null; x++) {
        final Node wNode = wNodeList.item(x);
        if (wNode instanceof Element) {
            final Element wElement = (Element) wNode;
            if ("org.eclipse.jdt.launching.VM_ARGUMENTS".equals(wElement.getAttribute("key"))) {
                wVmArgument = wElement.getAttribute("value");
            }
        }
    }
    if (wVmArgument == null) {
        // fix: the original dereferenced wVmArgument unconditionally and threw a
        // NullPointerException when the launch file declared no VM_ARGUMENTS
        getLog().warn("no VM_ARGUMENTS attribute found in launch configuration");
        return "";
    }
    final Map<String, String> wMapOverrideArgument = new HashMap<>();
    if (overrideShellArgumentFilePath != null) {
        getLog().debug(String.format("file override argument vm %s", overrideShellArgumentFilePath));
        final CXFileUtf8 wFileContentOverrideVmArg = new CXFileUtf8(overrideShellArgumentFilePath);
        if (wFileContentOverrideVmArg.exists()) {
            // the file content replaces any inline override value
            overrideShellArgument = wFileContentOverrideVmArg.readAll();
            getLog().debug(String.format("content file override argument vm %s", overrideShellArgument));
        }
    } else {
        getLog().debug(String.format("no file override argument vm %s", overrideShellArgumentFilePath));
    }
    if (overrideShellArgument != null) {
        // build the map of key -> override value; keys are stripped of whitespace
        // and of their "-D" prefix so they match the launch-file keys
        for (final String wOverrideArg : overrideShellArgument.split("\n")) {
            if (wOverrideArg.contains("=")) {
                final String[] wSplit = wOverrideArg.split("=");
                String wArgumentKey = wSplit[0];
                getLog().debug(String.format("argument override key %s", wArgumentKey));
                wArgumentKey = wArgumentKey.replaceAll(" ", "").replaceAll("\t", "");
                if (wArgumentKey.startsWith("-D")) {
                    wArgumentKey = wArgumentKey.substring(2);
                }
                wMapOverrideArgument.put(wArgumentKey, wSplit.length > 1 ? wSplit[1] : "");
            }
        }
    } else {
        getLog().debug("no override argument");
    }
    // fix: use StringBuilder instead of repeated String concatenation in the loop
    final StringBuilder wVmArgUsableInShell = new StringBuilder();
    for (final String wLine : wVmArgument.split("\n")) {
        if (wLine.trim().isEmpty()) {
            continue;
        }
        String wArgumentKey = null;
        if (wLine.contains("=")) {
            wArgumentKey = wLine.split("=")[0];
            if (wArgumentKey.startsWith("-D")) {
                wArgumentKey = wArgumentKey.substring(2);
            }
        }
        if (wArgumentKey != null) {
            getLog().debug(String.format("argument key %s", wArgumentKey));
        }
        if (wArgumentKey != null && wMapOverrideArgument.containsKey(wArgumentKey)) {
            // the eclipse value is replaced by the maven-supplied override
            wVmArgUsableInShell.append("\t-D").append(wArgumentKey).append('=')
                    .append(wMapOverrideArgument.get(wArgumentKey)).append(" \\\n");
        } else {
            wVmArgUsableInShell.append('\t').append(wLine).append(" \\\n");
        }
    }
    getLog().debug(String.format("vm arguments [%s]", wVmArgUsableInShell));
    return wVmArgUsableInShell.toString();
}
/**
 * Collects the symbolic names of the bundles selected in the launch
 * configuration: the "workspace_bundles" and "target_bundles" attributes.
 * Each entry looks like "symbolic.name@startLevel"; only the part before
 * '@' is kept.
 *
 * @param wDocLauncherEclipse parsed launch configuration
 * @return symbolic names of all bundles to add to the felix config
 */
private List<String> getListSymbolicBundleNameToAdd(Document wDocLauncherEclipse)
        throws SAXException, IOException, ParserConfigurationException {
    final List<String> wLisSymbolicBundleName = new ArrayList<>();
    final NodeList wNodeList = wDocLauncherEclipse.getElementsByTagName("stringAttribute");
    for (int x = 0; x < wNodeList.getLength(); x++) {
        final Node wNode = wNodeList.item(x);
        if (wNode instanceof Element) {
            final Element wElement = (Element) wNode;
            final String wKey = wElement.getAttribute("key");
            // both attributes are processed identically (the original duplicated the
            // branch); note getAttribute never returns null per the DOM spec, so the
            // original's null check was redundant
            if ("workspace_bundles".equals(wKey) || "target_bundles".equals(wKey)) {
                for (final String wBundleName : wElement.getAttribute("value").split(",")) {
                    if (wBundleName.contains("@")) {
                        wLisSymbolicBundleName.add(wBundleName.split("@")[0]);
                    }
                }
            }
        }
    }
    getLog().debug(String.format("symbolic name bundle to add [%s]", wLisSymbolicBundleName));
    return wLisSymbolicBundleName;
}
/**
 * Builds the shell script that launches the felix framework with the VM
 * arguments extracted from the launch configuration, and writes it to
 * "target.shell.file.path" when that parameter is set.
 *
 * @param aLauncherEclipseDom parsed launch configuration
 */
private void createJvmShell(Document aLauncherEclipseDom)
        throws MojoExecutionException, IOException, SAXException, ParserConfigurationException {
    final String wShellFormat = "#!/bin/sh\njava %s -Dfelix.config.properties=file:/%s -Dfile.encoding=UTF-8 -jar %s bundle-cache -consoleLog -console";
    final String wShell = String.format(wShellFormat, getVMParameter(aLauncherEclipseDom),
            shellFelixConfigFilePath, shellFelixJarFilePath);
    getLog().info(String.format("shell launch jvm =[%s]", wShell));
    if (targetLaunchJvmFile == null) {
        return;
    }
    final CXFileUtf8 wShellFile = new CXFileUtf8(targetLaunchJvmFile);
    wShellFile.getParentDirectory().mkdirs();
    wShellFile.writeAll(wShell);
}
/**
 * Generates the felix config file: loads base properties (from an http(s)
 * url, a local file, or built-in defaults), appends every bundle selected in
 * the launch configuration to "felix.auto.start.4", rewrites local directory
 * paths to their target location, and writes the result to
 * "target.config.file.path".
 *
 * @param aFileBaseConfifPath base config file path or url (may be null)
 * @param aLauncherEclipseDom parsed launch configuration
 * @throws MojoExecutionException if a selected bundle cannot be located
 */
private void createConfigFelixFile(String aFileBaseConfifPath, Document aLauncherEclipseDom)
        throws MojoExecutionException, IOException, SAXException, ParserConfigurationException {
    final Map<String, String> wDirsBundleLocation = analyseDirectory(pPathBundleTarget);
    if (aFileBaseConfifPath != null) {
        // load base properties from an url or a local file
        if (aFileBaseConfifPath.startsWith("http://") || aFileBaseConfifPath.startsWith("https://")) {
            final HttpGet wGet = new HttpGet(aFileBaseConfifPath);
            // fix: close the http client (was leaked)
            try (CloseableHttpClient wClient = HttpClientBuilder.create().build()) {
                final HttpResponse wResponse = wClient.execute(wGet);
                pProperties.load(wResponse.getEntity().getContent());
            }
        } else {
            final CXFile wFileBaseProperty = new CXFile(aFileBaseConfifPath);
            if (wFileBaseProperty.exists()) {
                pProperties.load(wFileBaseProperty.getInputStream());
            }
        }
    } else {
        // no base config: seed sensible felix defaults
        pProperties.put("org.osgi.framework.storage.clean", "none");
        pProperties.put("org.osgi.framework.storage", "bundle-cache");
        pProperties.put("org.osgi.framework.startlevel.beginning", "4");
        pProperties.put("felix.cache.rootdir",
                felixCacheRootDir != null ? felixCacheRootDir : "/opt/node/felix/rootdir");
    }
    final List<String> wListSymbolicBundleName = getListSymbolicBundleNameToAdd(aLauncherEclipseDom);
    // names still in this list at the end could not be resolved to a jar
    final List<String> wTreatedSymbolicNames = new ArrayList<>(wListSymbolicBundleName);
    String wListBundles = pProperties.getProperty("felix.auto.start.4");
    if (wListBundles == null) {
        wListBundles = "";
    }
    final StringBuilder wBundles = new StringBuilder(wListBundles);
    for (final String wSymbolicBundleToAdd : wListSymbolicBundleName) {
        if (pMapSymbolicNameToJarPath.containsKey(wSymbolicBundleToAdd)) {
            if (wBundles.length() > 0) {
                wBundles.append(" \\\n");
            }
            // NOTE(review): "file:\" is an unusual url prefix — "file:/" is the common
            // form; kept as-is because downstream consumers may depend on it. Confirm.
            final String wAddBundle = "file:\\" + pMapSymbolicNameToJarPath.get(wSymbolicBundleToAdd);
            getLog().info(String.format("add bundle=[%s]!", wAddBundle));
            wBundles.append(wAddBundle);
            wTreatedSymbolicNames.remove(wSymbolicBundleToAdd);
        }
    }
    if (!wTreatedSymbolicNames.isEmpty()) {
        getLog().error("symbolicName no treated " + wTreatedSymbolicNames);
        throw new MojoExecutionException("symbolicName no treated " + wTreatedSymbolicNames);
    }
    wListBundles = wBundles.toString();
    // replace every local directory by its target directory
    // fix: use the literal String.replace — replaceAll interprets the path as a
    // regex and the replacement as a pattern, which breaks on backslashes, dots
    // and other regex meta-characters in file paths
    for (final Map.Entry<String, String> wEntry : wDirsBundleLocation.entrySet()) {
        wListBundles = wListBundles.replace(wEntry.getKey(), wEntry.getValue());
    }
    getLog().info(String.format("felix.auto.start.4=[%s]!", wListBundles));
    pProperties.put("felix.auto.start.4", wListBundles);
    getLog().debug(String.format("properties file content=[%s]", pProperties.toString()));
    if (pathTargerConfigFile != null) {
        final CXFileUtf8 wFileTargetConfig = new CXFileUtf8(pathTargerConfigFile);
        wFileTargetConfig.getParentDirectory().mkdirs();
        wFileTargetConfig.openWrite();
        try {
            for (final Object wKey : pProperties.keySet()) {
                wFileTargetConfig.write(wKey.toString() + "=" + pProperties.getProperty(wKey.toString()) + "\n");
            }
        } finally {
            // fix: close the file even when a write fails (was leaked on exception)
            wFileTargetConfig.close();
        }
    }
}
/**
 * Mojo entry point: validates the "launch.eclipse.file.path" and
 * "bundle.jar.directories" parameters, parses the eclipse launch
 * configuration, then generates the felix config file and the launch shell.
 * Missing parameters are logged and skipped; a missing launch file or a
 * generation failure aborts the build.
 *
 * @throws MojoExecutionException if the launch file does not exist or any
 *         generation step fails
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    getLog().info("execute ");
    if (launchEclipseFile != null) {
        final File wFileLaunchConfig = new File(launchEclipseFile);
        if (pPathBundleTarget != null) {
            if (!wFileLaunchConfig.exists()) {
                getLog().error(String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath()));
                throw new MojoExecutionException(
                        String.format("file %s no found !", wFileLaunchConfig.getAbsolutePath()));
            } else {
                try {
                    getLog().info(String.format("launch file=[%s]!", wFileLaunchConfig.getAbsolutePath()));
                    getLog().info(String.format("path directories jar=[%s]!", pPathBundleTarget));
                    // parse the .launch XML once, then produce both artifacts from it
                    final Document wLauncherEclipseDom = getDocumentFromLauncherFile(launchEclipseFile);
                    createConfigFelixFile(sourceConfigFile, wLauncherEclipseDom);
                    createJvmShell(wLauncherEclipseDom);
                } catch (final Exception e) {
                    getLog().error(String.format("fail to parse xml file %s error=[%s]!",
                            wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e)));
                    throw new MojoExecutionException(String.format("fail to parse xml file %s error=[%s]!",
                            wFileLaunchConfig.getAbsolutePath(), CXException.eInString(e)));
                }
            }
        } else {
            // nothing to do without the bundle directories parameter
            getLog().info(String.format("no path bundle directory !"));
        }
    } else {
        // nothing to do without a launch configuration file
        getLog().info(String.format("no launch file %s!", launchEclipseFile));
    }
}
}
|
use comma instead of equals
|
org.cohorte.eclipse.felix.config.generator/src/org/cohorte/eclipse/felix/config/generator/ConfigGenerator.java
|
use comma instead of equals
|
|
Java
|
apache-2.0
|
ce74909e80c2ba09dca758ee7092b196030ec2fb
| 0
|
ChinaQuants/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,nssales/OG-Platform,jerome79/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,jerome79/OG-Platform,ChinaQuants/OG-Platform,ChinaQuants/OG-Platform,DevStreet/FinanceAnalytics,nssales/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,jerome79/OG-Platform,codeaudit/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,jeorme/OG-Platform,McLeodMoores/starling,nssales/OG-Platform
|
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.currency;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeries;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.marketdata.ExternalIdBundleResolver;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.analytics.timeseries.DateConstraint;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixCross;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixFixed;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixValueRequirement;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolutionResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.util.money.Currency;
import com.opengamma.util.timeseries.DoubleTimeSeries;
import com.opengamma.util.tuple.Pair;
/**
* Injects a time series implied from a value from a {@link CurrencyMatrix} into a dependency graph to satisfy the currency requirements generated by {@link CurrencySeriesConversionFunction}.
*/
public class CurrencyMatrixSeriesSourcingFunction extends AbstractCurrencyMatrixSourcingFunction {
private static final Logger s_logger = LoggerFactory.getLogger(CurrencyMatrixSeriesSourcingFunction.class);
// PLAT-2813 Don't need this if we can request HTS requirements directly
private HistoricalTimeSeriesResolver _htsResolver;
/**
 * Creates the function for the named currency matrix.
 *
 * @param currencyMatrixName name of the {@link CurrencyMatrix} to source values from
 */
public CurrencyMatrixSeriesSourcingFunction(final String currencyMatrixName) {
    super(currencyMatrixName);
}
/**
 * Creates the function from raw configuration parameters, forwarded to the
 * superclass for interpretation.
 *
 * @param params function configuration parameters
 */
public CurrencyMatrixSeriesSourcingFunction(final String[] params) {
    super(params);
}
/**
 * Sets the time series resolver used to locate underlying market value
 * series. See PLAT-2813: this resolver would be unnecessary if HTS
 * requirements could be requested directly.
 */
protected void setHistoricalTimeSeriesResolver(final HistoricalTimeSeriesResolver htsResolver) {
    _htsResolver = htsResolver;
}
/**
 * Returns the configured time series resolver, or null before {@link #init}
 * has run (or when the compilation context provides none).
 */
protected HistoricalTimeSeriesResolver getHistoricalTimeSeriesResolver() {
    return _htsResolver;
}
/**
 * Initializes the function, capturing the time series resolver from the
 * compilation context.
 */
@Override
public void init(final FunctionCompilationContext context) {
    super.init(context);
    // PLAT-2813 Don't need this if we can request HTS requirements directly
    setHistoricalTimeSeriesResolver(OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context));
}
/**
 * Declares the SPOT_RATE series result for the target — but only when a
 * time series resolver is available, since without one no underlying series
 * could be resolved.
 */
@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
    final Set<ValueSpecification> specs = new HashSet<ValueSpecification>();
    final ComputationTargetSpecification spec = target.toSpecification();
    final ValueProperties props = createValueProperties().get();
    if (getHistoricalTimeSeriesResolver() != null) {
        specs.add(new ValueSpecification(CurrencySeriesConversionFunction.SPOT_RATE, spec, props));
    }
    return specs;
}
/**
 * Builds the historical time-series requirement that supplies the market
 * value series backing a single matrix entry, or returns null when the
 * entry's target cannot be resolved to a time series.
 */
private ValueRequirement getSeriesConversionRequirement(final ExternalIdBundleResolver resolver, final CurrencyMatrixValueRequirement valueRequirement) {
    final ValueRequirement requirement = valueRequirement.getValueRequirement();
    // TODO: PLAT-2813 Don't perform the resolution here; request the time series directly
    final ExternalIdBundle targetIdentifiers = resolver.getExternalIdBundle(requirement.getTargetReference());
    if (targetIdentifiers == null) {
        // target not resolvable to external identifiers — no series available
        return null;
    }
    final HistoricalTimeSeriesResolutionResult timeSeries = getHistoricalTimeSeriesResolver().resolve(targetIdentifiers, null, null, null, MarketDataRequirementNames.MARKET_VALUE, null);
    if (timeSeries == null) {
        // no time series found for the identifiers
        return null;
    }
    // TODO: Requesting the whole time series isn't ideal but we don't know which points will be needed. Could the time series somehow be a lazy-fetch?
    // Is this really a problem - caching the whole time series at a calc node may be better than requesting different subsets each time?
    return HistoricalTimeSeriesFunctionUtils.createHTSRequirement(timeSeries, MarketDataRequirementNames.MARKET_VALUE, DateConstraint.NULL, true,
            DateConstraint.VALUATION_TIME, true);
}
/**
 * Recursively accumulates the time series requirements needed to convert between the given
 * currency pair, following cross rates through the matrix.
 *
 * @param resolver resolves target references for leaf value requirements
 * @param requirements output set receiving the HTS requirements
 * @param visited pairs already seen, used to detect cycles in the matrix
 * @param currencies the (source, target) pair to convert
 * @return true if all requirements could be produced, false if any leaf failed to resolve
 *         or there is no conversion entry for the pair
 * @throws IllegalStateException if the matrix contains a cycle for this pair
 */
private boolean getSeriesConversionRequirements(final ExternalIdBundleResolver resolver, final Set<ValueRequirement> requirements, final Set<Pair<Currency, Currency>> visited,
final Pair<Currency, Currency> currencies) {
if (!visited.add(currencies)) {
// Gone round in a loop if we've already seen this pair
throw new IllegalStateException();
}
final CurrencyMatrixValue value = getCurrencyMatrix().getConversion(currencies.getFirst(), currencies.getSecond());
if (value != null) {
return value.accept(new CurrencyMatrixValueVisitor<Boolean>() {
@Override
public Boolean visitCross(final CurrencyMatrixCross cross) {
// A cross rate needs both legs: source->cross and cross->target
return getSeriesConversionRequirements(resolver, requirements, visited, Pair.of(currencies.getFirst(), cross.getCrossCurrency()))
&& getSeriesConversionRequirements(resolver, requirements, visited, Pair.of(cross.getCrossCurrency(), currencies.getSecond()));
}
@Override
public Boolean visitFixed(final CurrencyMatrixFixed fixedValue) {
// Literal value - nothing required
return Boolean.TRUE;
}
@Override
public Boolean visitValueRequirement(final CurrencyMatrixValueRequirement valueRequirement) {
final ValueRequirement requirement = getSeriesConversionRequirement(resolver, valueRequirement);
if (requirement == null) {
return Boolean.FALSE;
}
requirements.add(requirement);
return Boolean.TRUE;
}
});
} else {
return false;
}
}
/**
 * Produces the full set of time series requirements for the currency pair encoded in the
 * target's unique id. Returns null (meaning "cannot satisfy") if any leaf requirement
 * fails to resolve.
 */
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
final Pair<Currency, Currency> currencies = parse(target.getUniqueId());
final Set<ValueRequirement> requirements = new HashSet<ValueRequirement>();
if (!getSeriesConversionRequirements(new ExternalIdBundleResolver(context.getComputationTargetResolver()), requirements, new HashSet<Pair<Currency, Currency>>(), currencies)) {
return null;
}
return requirements;
}
/**
 * Computes the conversion rate (a fixed number or a {@code DoubleTimeSeries}) between the
 * two currencies, combining cross rates recursively and taking the reciprocal of a leaf
 * series when the matrix entry is declared as reciprocal.
 *
 * @param resolver resolves leaf requirements back to the input values
 * @param inputs the function inputs holding the resolved time series
 * @param source the source currency
 * @param target the target currency
 * @return either a {@code Double} (fixed entry) or a {@code DoubleTimeSeries}
 * @throws IllegalArgumentException if a leaf input is missing or not a time series
 */
private Object getSeriesConversionRate(final ExternalIdBundleResolver resolver, final FunctionInputs inputs, final Currency source, final Currency target) {
final CurrencyMatrixValue value = getCurrencyMatrix().getConversion(source, target);
final Object rate = value.accept(new CurrencyMatrixValueVisitor<Object>() {
@Override
public Object visitCross(final CurrencyMatrixCross cross) {
final Object r1 = getSeriesConversionRate(resolver, inputs, source, cross.getCrossCurrency());
final Object r2 = getSeriesConversionRate(resolver, inputs, cross.getCrossCurrency(), target);
return createCrossRate(r1, r2);
}
@Override
public Object visitFixed(final CurrencyMatrixFixed fixedValue) {
return fixedValue.getFixedValue();
}
@Override
public Object visitValueRequirement(final CurrencyMatrixValueRequirement valueRequirement) {
final Object marketValue = inputs.getValue(getSeriesConversionRequirement(resolver, valueRequirement));
if (marketValue instanceof DoubleTimeSeries) {
//TODO is this branch ever reached?
DoubleTimeSeries<?> fxRate = (DoubleTimeSeries<?>) marketValue;
// Invert the series only when the matrix entry is quoted the other way round
if (valueRequirement.isReciprocal()) {
fxRate = fxRate.reciprocal();
}
return fxRate;
} else if (marketValue instanceof HistoricalTimeSeries) {
DoubleTimeSeries<?> fxRate = ((HistoricalTimeSeries) marketValue).getTimeSeries();
// Invert the series only when the matrix entry is quoted the other way round
if (valueRequirement.isReciprocal()) {
fxRate = fxRate.reciprocal();
}
return fxRate;
} else {
if (marketValue == null) {
throw new IllegalArgumentException("Null time series for " + valueRequirement.toString());
}
throw new IllegalArgumentException("Expected a time series for " + valueRequirement.toString() + ", got " + marketValue.getClass());
}
}
});
s_logger.debug("{} to {} = {}", new Object[] {source, target, rate });
return rate;
}
/**
 * Computes the conversion rate series for the currency pair encoded in the target id and
 * publishes it under the first (and only) desired value's specification.
 */
@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) {
  final Pair<Currency, Currency> currencyPair = parse(target.getUniqueId());
  final ValueRequirement desired = desiredValues.iterator().next();
  final ValueSpecification resultSpec = new ValueSpecification(desired.getValueName(), target.toSpecification(), desired.getConstraints());
  final ExternalIdBundleResolver idResolver = new ExternalIdBundleResolver(executionContext.getComputationTargetResolver());
  final Object rate = getSeriesConversionRate(idResolver, inputs, currencyPair.getFirst(), currencyPair.getSecond());
  return Collections.singleton(new ComputedValue(resultSpec, rate));
}
/**
 * Builds the requirement a consumer should use to request the conversion series between
 * two currencies.
 *
 * @param source the source currency, not null
 * @param target the target currency, not null
 * @return the requirement keyed by the concatenated currency codes
 */
public static ValueRequirement getConversionRequirement(final Currency source, final Currency target) {
return getConversionRequirement(source.getCode(), target.getCode());
}
/**
 * Builds the requirement for a conversion series from ISO currency code strings. The
 * target's unique id is the two codes concatenated (e.g. "USDGBP").
 *
 * @param source the source currency code
 * @param target the target currency code
 * @return the SPOT_RATE requirement on the primitive target
 */
public static ValueRequirement getConversionRequirement(final String source, final String target) {
return new ValueRequirement(CurrencySeriesConversionFunction.SPOT_RATE, ComputationTargetType.PRIMITIVE, UniqueId.of(TARGET_IDENTIFIER_SCHEME, source + target));
}
}
|
projects/OG-Financial/src/main/java/com/opengamma/financial/currency/CurrencyMatrixSeriesSourcingFunction.java
|
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.financial.currency;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.opengamma.core.historicaltimeseries.HistoricalTimeSeries;
import com.opengamma.core.value.MarketDataRequirementNames;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.ComputationTargetSpecification;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.function.FunctionInputs;
import com.opengamma.engine.marketdata.ExternalIdBundleResolver;
import com.opengamma.engine.target.ComputationTargetType;
import com.opengamma.engine.value.ComputedValue;
import com.opengamma.engine.value.ValueProperties;
import com.opengamma.engine.value.ValueRequirement;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.OpenGammaCompilationContext;
import com.opengamma.financial.analytics.timeseries.DateConstraint;
import com.opengamma.financial.analytics.timeseries.HistoricalTimeSeriesFunctionUtils;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixCross;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixFixed;
import com.opengamma.financial.currency.CurrencyMatrixValue.CurrencyMatrixValueRequirement;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.id.UniqueId;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolutionResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesResolver;
import com.opengamma.util.money.Currency;
import com.opengamma.util.timeseries.DoubleTimeSeries;
import com.opengamma.util.tuple.Pair;
/**
 * Injects a time series implied from a value from a {@link CurrencyMatrix} into a dependency graph
 * to satisfy the currency requirements generated by {@link CurrencySeriesConversionFunction}.
 */
public class CurrencyMatrixSeriesSourcingFunction extends AbstractCurrencyMatrixSourcingFunction {

  private static final Logger s_logger = LoggerFactory.getLogger(CurrencyMatrixSeriesSourcingFunction.class);

  // PLAT-2813 Don't need this if we can request HTS requirements directly
  private HistoricalTimeSeriesResolver _htsResolver;

  public CurrencyMatrixSeriesSourcingFunction(final String currencyMatrixName) {
    super(currencyMatrixName);
  }

  public CurrencyMatrixSeriesSourcingFunction(final String[] params) {
    super(params);
  }

  protected void setHistoricalTimeSeriesResolver(final HistoricalTimeSeriesResolver htsResolver) {
    _htsResolver = htsResolver;
  }

  protected HistoricalTimeSeriesResolver getHistoricalTimeSeriesResolver() {
    return _htsResolver;
  }

  @Override
  public void init(final FunctionCompilationContext context) {
    super.init(context);
    // PLAT-2813 Don't need this if we can request HTS requirements directly
    setHistoricalTimeSeriesResolver(OpenGammaCompilationContext.getHistoricalTimeSeriesResolver(context));
  }

  /**
   * Declares the SPOT_RATE result, but only when a time series resolver is available -
   * without one the time series requirements cannot be produced later.
   */
  @Override
  public Set<ValueSpecification> getResults(final FunctionCompilationContext context, final ComputationTarget target) {
    final ComputationTargetSpecification targetSpec = target.toSpecification();
    final ValueProperties properties = createValueProperties().get();
    final Set<ValueSpecification> results = new HashSet<ValueSpecification>();
    if (getHistoricalTimeSeriesResolver() != null) {
      results.add(new ValueSpecification(CurrencySeriesConversionFunction.SPOT_RATE, targetSpec, properties));
    }
    return results;
  }

  /**
   * Builds the historical time series requirement backing a matrix value requirement,
   * returning null when the target cannot be resolved or no series exists.
   */
  private ValueRequirement getSeriesConversionRequirement(final ExternalIdBundleResolver resolver, final CurrencyMatrixValueRequirement valueRequirement) {
    final ValueRequirement requirement = valueRequirement.getValueRequirement();
    // TODO: PLAT-2813 Don't perform the resolution here; request the time series directly
    final ExternalIdBundle targetIdentifiers = resolver.getExternalIdBundle(requirement.getTargetReference());
    if (targetIdentifiers == null) {
      // FIX: the resolver may not find a suitable bundle for the target; previously this
      // fell through to the HTS resolver with null identifiers and threw an NPE
      return null;
    }
    final HistoricalTimeSeriesResolutionResult timeSeries = getHistoricalTimeSeriesResolver().resolve(targetIdentifiers, null, null, null, MarketDataRequirementNames.MARKET_VALUE, null);
    if (timeSeries == null) {
      return null;
    }
    // TODO: Requesting the whole time series isn't ideal but we don't know which points will be needed. Could the time series somehow be a lazy-fetch?
    // Is this really a problem - caching the whole time series at a calc node may be better than requesting different subsets each time?
    return HistoricalTimeSeriesFunctionUtils.createHTSRequirement(timeSeries, MarketDataRequirementNames.MARKET_VALUE, DateConstraint.NULL, true,
        DateConstraint.VALUATION_TIME, true);
  }

  /**
   * Recursively accumulates the time series requirements needed to convert between the
   * given currency pair, following cross rates through the matrix. Returns false when any
   * leaf fails to resolve; throws IllegalStateException on a cycle.
   */
  private boolean getSeriesConversionRequirements(final ExternalIdBundleResolver resolver, final Set<ValueRequirement> requirements, final Set<Pair<Currency, Currency>> visited,
      final Pair<Currency, Currency> currencies) {
    if (!visited.add(currencies)) {
      // Gone round in a loop if we've already seen this pair
      throw new IllegalStateException();
    }
    final CurrencyMatrixValue value = getCurrencyMatrix().getConversion(currencies.getFirst(), currencies.getSecond());
    if (value != null) {
      return value.accept(new CurrencyMatrixValueVisitor<Boolean>() {
        @Override
        public Boolean visitCross(final CurrencyMatrixCross cross) {
          // A cross rate needs both legs: source->cross and cross->target
          return getSeriesConversionRequirements(resolver, requirements, visited, Pair.of(currencies.getFirst(), cross.getCrossCurrency()))
              && getSeriesConversionRequirements(resolver, requirements, visited, Pair.of(cross.getCrossCurrency(), currencies.getSecond()));
        }

        @Override
        public Boolean visitFixed(final CurrencyMatrixFixed fixedValue) {
          // Literal value - nothing required
          return Boolean.TRUE;
        }

        @Override
        public Boolean visitValueRequirement(final CurrencyMatrixValueRequirement valueRequirement) {
          final ValueRequirement requirement = getSeriesConversionRequirement(resolver, valueRequirement);
          if (requirement == null) {
            return Boolean.FALSE;
          }
          requirements.add(requirement);
          return Boolean.TRUE;
        }
      });
    } else {
      return false;
    }
  }

  @Override
  public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context, final ComputationTarget target, final ValueRequirement desiredValue) {
    final Pair<Currency, Currency> currencies = parse(target.getUniqueId());
    final Set<ValueRequirement> requirements = new HashSet<ValueRequirement>();
    if (!getSeriesConversionRequirements(new ExternalIdBundleResolver(context.getComputationTargetResolver()), requirements, new HashSet<Pair<Currency, Currency>>(), currencies)) {
      return null;
    }
    return requirements;
  }

  /**
   * Computes the conversion rate (a fixed number or a time series) between two currencies,
   * combining cross rates recursively.
   */
  private Object getSeriesConversionRate(final ExternalIdBundleResolver resolver, final FunctionInputs inputs, final Currency source, final Currency target) {
    final CurrencyMatrixValue value = getCurrencyMatrix().getConversion(source, target);
    final Object rate = value.accept(new CurrencyMatrixValueVisitor<Object>() {
      @Override
      public Object visitCross(final CurrencyMatrixCross cross) {
        final Object r1 = getSeriesConversionRate(resolver, inputs, source, cross.getCrossCurrency());
        final Object r2 = getSeriesConversionRate(resolver, inputs, cross.getCrossCurrency(), target);
        return createCrossRate(r1, r2);
      }

      @Override
      public Object visitFixed(final CurrencyMatrixFixed fixedValue) {
        return fixedValue.getFixedValue();
      }

      @Override
      public Object visitValueRequirement(final CurrencyMatrixValueRequirement valueRequirement) {
        final Object marketValue = inputs.getValue(getSeriesConversionRequirement(resolver, valueRequirement));
        if (marketValue instanceof DoubleTimeSeries) {
          //TODO is this branch ever reached?
          DoubleTimeSeries<?> fxRate = (DoubleTimeSeries<?>) marketValue;
          // FIX: previously the condition was negated, inverting the series for the
          // non-reciprocal quote; only invert when the entry is declared reciprocal
          if (valueRequirement.isReciprocal()) {
            fxRate = fxRate.reciprocal();
          }
          return fxRate;
        } else if (marketValue instanceof HistoricalTimeSeries) {
          DoubleTimeSeries<?> fxRate = ((HistoricalTimeSeries) marketValue).getTimeSeries();
          // FIX: same inverted condition as above - invert only for reciprocal quotes
          if (valueRequirement.isReciprocal()) {
            fxRate = fxRate.reciprocal();
          }
          return fxRate;
        } else {
          if (marketValue == null) {
            throw new IllegalArgumentException("Null time series for " + valueRequirement.toString());
          }
          throw new IllegalArgumentException("Expected a time series for " + valueRequirement.toString() + ", got " + marketValue.getClass());
        }
      }
    });
    s_logger.debug("{} to {} = {}", new Object[] {source, target, rate });
    return rate;
  }

  @Override
  public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs, final ComputationTarget target, final Set<ValueRequirement> desiredValues) {
    final Pair<Currency, Currency> currencies = parse(target.getUniqueId());
    final ComputationTargetSpecification targetSpec = target.toSpecification();
    final ValueRequirement desiredValue = desiredValues.iterator().next();
    return Collections.singleton(new ComputedValue(new ValueSpecification(desiredValue.getValueName(), targetSpec, desiredValue.getConstraints()),
        getSeriesConversionRate(new ExternalIdBundleResolver(executionContext.getComputationTargetResolver()), inputs, currencies.getFirst(), currencies.getSecond())));
  }

  public static ValueRequirement getConversionRequirement(final Currency source, final Currency target) {
    return getConversionRequirement(source.getCode(), target.getCode());
  }

  public static ValueRequirement getConversionRequirement(final String source, final String target) {
    return new ValueRequirement(CurrencySeriesConversionFunction.SPOT_RATE, ComputationTargetType.PRIMITIVE, UniqueId.of(TARGET_IDENTIFIER_SCHEME, source + target));
  }
}
|
Avoid NPE when the resolver can't find a suitable bundle for the target.
Don't invert time series data when fetching a series.
|
projects/OG-Financial/src/main/java/com/opengamma/financial/currency/CurrencyMatrixSeriesSourcingFunction.java
|
Avoid NPE when the resolver can't find a suitable bundle for the target. Don't invert time series data when fetching a series.
|
|
Java
|
apache-2.0
|
42a8331bba248c8273e3192ba0b9e400e5a00d61
| 0
|
googleinterns/step188-2020,googleinterns/step188-2020,googleinterns/step188-2020
|
package com.google.sps;
import com.google.sps.data.VolunteeringOpportunity;
import com.google.sps.utilities.SpannerClient;
import com.google.sps.utilities.SpannerTestTasks;
import com.google.sps.utilities.SpannerTasks;
import com.google.sps.utilities.SpannerTestTasks;
import java.util.Set;
import com.google.sps.servlets.LoginStatusServlet;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.springframework.mock.web.MockServletContext;
import javax.servlet.ServletContextEvent;
/** Unit tests for DatabaseWrapper functionality related to VolunteeringOpportunity class. */
@RunWith(JUnit4.class)
public class OpportunitySpannerTasksTest {
  private static final String NAME = "Performer";
  private static final int NUMBER_OF_SPOTS = 240;
  private static final String EVENT_ID = "0883de79-17d7-49a3-a866-dbd5135062a8";

  @BeforeClass
  public static void setUp() throws Exception {
    // Mock a request to trigger the SpannerClient setup to run
    MockServletContext mockServletContext = new MockServletContext();
    new SpannerClient().contextInitialized(new ServletContextEvent(mockServletContext));
    SpannerTestTasks.setup();
  }

  @Test
  public void opportunityInsertAndRetrieval() {
    // An inserted opportunity should be retrievable by its generated id.
    VolunteeringOpportunity opportunity =
        new VolunteeringOpportunity.Builder(EVENT_ID, NAME, NUMBER_OF_SPOTS).build();
    SpannerTasks.insertVolunteeringOpportunity(opportunity);
    VolunteeringOpportunity actualOpportunity =
        SpannerTasks
            .getVolunteeringOpportunityByOppportunityId(opportunity.getOpportunityId())
            .get();
    // FIX: JUnit's assertEquals takes (expected, actual); the arguments were reversed,
    // which produces misleading failure messages.
    Assert.assertEquals(opportunity, actualOpportunity);
  }

  @Test
  public void retrieveVolunteeringOpportunitiesByEvent() {
    // Opportunities inserted for an event should be returned when querying by event id.
    VolunteeringOpportunity opportunity =
        new VolunteeringOpportunity.Builder(EVENT_ID, NAME, NUMBER_OF_SPOTS).build();
    SpannerTasks.insertVolunteeringOpportunity(opportunity);
    Set<VolunteeringOpportunity> opportunities =
        SpannerTasks.getVolunteeringOpportunitiesByEventId(EVENT_ID);
    VolunteeringOpportunity actualOpportunity = opportunities.stream().findFirst().get();
    // FIX: expected value first, actual second (JUnit convention).
    Assert.assertEquals(EVENT_ID, actualOpportunity.getEventId());
    Assert.assertEquals(NAME, actualOpportunity.getName());
    Assert.assertEquals(NUMBER_OF_SPOTS, actualOpportunity.getNumSpotsLeft());
  }

  @AfterClass
  public static void tearDown() throws Exception {
    SpannerTestTasks.cleanup();
  }
}
|
project/src/test/java/com/google/sps/OpportunitySpannerTasksTest.java
|
package com.google.sps;
import com.google.sps.data.VolunteeringOpportunity;
import com.google.sps.utilities.SpannerClient;
import com.google.sps.utilities.SpannerTestTasks;
import com.google.sps.utilities.SpannerTasks;
import com.google.sps.utilities.SpannerTestTasks;
import java.util.Set;
import com.google.sps.servlets.LoginStatusServlet;
import org.hamcrest.CoreMatchers;
import org.hamcrest.MatcherAssert;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.springframework.mock.web.MockServletContext;
import javax.servlet.ServletContextEvent;
/** Unit tests for DatabaseWrapper functionality related to VolunteeringOpportunity class. */
@RunWith(JUnit4.class)
public class OpportunitySpannerTasksTest {
// Fixed fixture values used by every test case.
private static final String NAME = "Performer";
private static final int NUMBER_OF_SPOTS = 240;
private static final String EVENT_ID = "0883de79-17d7-49a3-a866-dbd5135062a8";
@BeforeClass
public static void setUp() throws Exception {
// Mock a request to trigger the SpannerClient setup to run
MockServletContext mockServletContext = new MockServletContext();
new SpannerClient().contextInitialized(new ServletContextEvent(mockServletContext));
SpannerTestTasks.setup();
}
// Verifies an inserted opportunity can be read back by its generated id.
// NOTE(review): JUnit's assertEquals convention is (expected, actual);
// the arguments here are reversed, which only affects failure messages.
@Test
public void opportunityInsertAndRetrieval() {
VolunteeringOpportunity opportunity =
new VolunteeringOpportunity.Builder(EVENT_ID, NAME, NUMBER_OF_SPOTS).build();
SpannerTasks.insertVolunteeringOpportunity(opportunity);
VolunteeringOpportunity actualOpportunity =
SpannerTasks
.getVolunteeringOpportunityByOppportunityId(opportunity.getOpportunityId())
.get();
Assert.assertEquals(actualOpportunity, opportunity);
}
// Verifies querying by event id returns a set containing the inserted opportunity.
@Test
public void retrieveVolunteeringOpportunitiesByEvent() {
VolunteeringOpportunity opportunity =
new VolunteeringOpportunity.Builder(EVENT_ID, NAME, NUMBER_OF_SPOTS).build();
SpannerTasks.insertVolunteeringOpportunity(opportunity);
Set<VolunteeringOpportunity> opportunities =
SpannerTasks.getVolunteeringOpportunitiesByEventId(EVENT_ID);
MatcherAssert.assertThat(opportunities, CoreMatchers.hasItems(opportunity));
}
@AfterClass
public static void tearDown() throws Exception {
SpannerTestTasks.cleanup();
}
}
|
Fix from code review.
|
project/src/test/java/com/google/sps/OpportunitySpannerTasksTest.java
|
Fix from code review.
|
|
Java
|
apache-2.0
|
059c90b64d29e2e9e74c4b9b5495738a12c11fdd
| 0
|
lekster/devicehive-java-server,lekster/devicehive-java-server,devicehive/devicehive-java-server,biddyweb/devicehive-java-server,biddyweb/devicehive-java-server,devicehive/devicehive-java-server,lekster/devicehive-java,lekster/devicehive-java-server,biddyweb/devicehive-java-server,devicehive/devicehive-java-server
|
package com.devicehive.service;
import com.devicehive.auth.HivePrincipal;
import com.devicehive.dao.DeviceCommandDAO;
import com.devicehive.exceptions.HiveException;
import com.devicehive.messages.bus.GlobalMessageBus;
import com.devicehive.messages.handler.WebsocketHandlerCreator;
import com.devicehive.messages.subscriptions.CommandUpdateSubscription;
import com.devicehive.messages.subscriptions.SubscriptionManager;
import com.devicehive.model.Device;
import com.devicehive.model.DeviceCommand;
import com.devicehive.model.SubscriptionFilterInternal;
import com.devicehive.model.User;
import com.devicehive.model.updates.DeviceCommandUpdate;
import com.devicehive.util.LogExecutionTime;
import com.devicehive.util.Timer;
import com.devicehive.websockets.util.AsyncMessageSupplier;
import com.devicehive.websockets.util.WebsocketSession;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.validation.constraints.NotNull;
import javax.websocket.Session;
import java.sql.Timestamp;
import java.util.List;
import static javax.ws.rs.core.Response.Status.NOT_FOUND;
/**
 * Stateless EJB providing persistence and messaging operations for device commands:
 * lookup, filtered listing, submission of new commands/updates (published to the global
 * message bus) and websocket subscription management for command updates.
 */
@Stateless
@LogExecutionTime
public class DeviceCommandService {
// Collaborators injected via EJB setter injection below.
private DeviceCommandDAO commandDAO;
// Self-reference so that transactional methods are invoked through the EJB proxy
// (direct calls would bypass the container's transaction interceptors).
private DeviceCommandService self;
private GlobalMessageBus globalMessageBus;
private AsyncMessageSupplier asyncMessageDeliverer;
private SubscriptionManager subscriptionManager;
private DeviceService deviceService;
@EJB
public void setSubscriptionManager(SubscriptionManager subscriptionManager) {
this.subscriptionManager = subscriptionManager;
}
@EJB
public void setAsyncMessageDeliverer(AsyncMessageSupplier asyncMessageDeliverer) {
this.asyncMessageDeliverer = asyncMessageDeliverer;
}
@EJB
public void setGlobalMessageBus(GlobalMessageBus globalMessageBus) {
this.globalMessageBus = globalMessageBus;
}
@EJB
public void setSelf(DeviceCommandService self) {
this.self = self;
}
@EJB
public void setCommandDAO(DeviceCommandDAO commandDAO) {
this.commandDAO = commandDAO;
}
// Read-only lookup: command with its device eagerly fetched.
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getWithDevice(@NotNull long id) {
return commandDAO.getWithDevice(id);
}
// Read-only lookup: command with its device and user eagerly fetched.
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getWithDeviceAndUser(@NotNull long id) {
return commandDAO.getWithDeviceAndUser(id);
}
// Read-only lookup by device guid and command id.
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getByGuidAndId(@NotNull String guid, @NotNull long id) {
return commandDAO.getByDeviceGuidAndId(guid, id);
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand findById(Long id) {
return commandDAO.findById(id);
}
@EJB
public void setDeviceService(DeviceService deviceService) {
this.deviceService = deviceService;
}
/**
 * Lists commands matching the subscription filter. When the filter names devices, a
 * device-name filter map is built; otherwise commands are filtered by timestamp,
 * command names and the caller's principal.
 */
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public List<DeviceCommand> getDeviceCommandsList(@NotNull SubscriptionFilterInternal subscriptionFilter,
HivePrincipal principal) {
if (subscriptionFilter.getDeviceNames() != null) {
return commandDAO
.findCommands(deviceService.createFilterMap(subscriptionFilter.getDeviceNames(), principal),
subscriptionFilter.getTimestamp(), null);
} else {
return commandDAO.findCommands(
subscriptionFilter.getTimestamp(),
subscriptionFilter.getNames(),
principal);
}
}
// Paged/sorted query of a device's commands; all filter arguments are optional.
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public List<DeviceCommand> queryDeviceCommand(Device device, Timestamp start, Timestamp end, String command,
String status, String sortField, Boolean sortOrderAsc,
Integer take, Integer skip, Integer gridInterval) {
return commandDAO.queryDeviceCommand(device, start, end, command, status, sortField, sortOrderAsc, take,
skip, gridInterval);
}
public DeviceCommand getByDeviceGuidAndId(@NotNull String guid, @NotNull long id) {
return commandDAO.getByDeviceGuidAndId(guid, id);
}
/**
 * Persists a command update (via the EJB proxy so it runs in its own transaction)
 * and then publishes it on the global message bus outside the transaction.
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void submitDeviceCommandUpdate(DeviceCommandUpdate update, Device device) {
Timer timer = Timer.newInstance();
DeviceCommand saved = self.saveDeviceCommandUpdate(update, device);
timer.logMethodExecuted("DeviceCommandService.self.saveDeviceCommandUpdate");
globalMessageBus.publishDeviceCommandUpdate(saved);
}
/**
 * Persists a new command (via the EJB proxy) and publishes it on the global message
 * bus outside the transaction.
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void submitDeviceCommand(DeviceCommand command, Device device, User user, final Session session) {
Timer timer = Timer.newInstance();
self.saveDeviceCommand(command, device, user, session);
timer.logMethodExecuted("DeviceCommandService.self.saveDeviceCommand");
globalMessageBus.publishDeviceCommand(command);
}
/**
 * Persists the command and, when a websocket session is supplied, registers a
 * subscription so that updates to this command are pushed to that session. The
 * remove handler unregisters the subscription when the websocket handler is torn down.
 */
public void saveDeviceCommand(final DeviceCommand command, Device device, User user, final Session session) {
command.setDevice(device);
command.setUser(user);
commandDAO.createCommand(command);
if (session != null) {
Runnable removeHandler = new Runnable() {
@Override
public void run() {
subscriptionManager.getCommandUpdateSubscriptionStorage().remove(command.getId(), session.getId());
}
};
CommandUpdateSubscription commandUpdateSubscription =
new CommandUpdateSubscription(command.getId(), session.getId(),
new WebsocketHandlerCreator(session, WebsocketSession.COMMAND_UPDATES_SUBSCRIPTION_LOCK,
asyncMessageDeliverer, removeHandler));
subscriptionManager.getCommandUpdateSubscriptionStorage().insert(commandUpdateSubscription);
}
}
/**
 * Applies a partial update to an existing command. Responds NOT_FOUND both when the
 * command does not exist and when it belongs to a different device, so callers cannot
 * probe for other devices' command ids.
 *
 * @throws HiveException with NOT_FOUND if the command is missing or owned by another device
 */
public DeviceCommand saveDeviceCommandUpdate(DeviceCommandUpdate update, Device device) {
DeviceCommand cmd = commandDAO.findById(update.getId());
if (cmd == null) {
throw new HiveException("Command not found!", NOT_FOUND.getStatusCode());
}
if (!cmd.getDevice().getId().equals(device.getId())) {
throw new HiveException(
"Command with id " + update.getId() + " wasn't found for device with id " + device.getGuid(),
NOT_FOUND.getStatusCode());
}
// Only non-null fields of the update are applied; absent fields keep their values.
if (update.getCommand() != null) {
cmd.setCommand(update.getCommand().getValue());
}
if (update.getFlags() != null) {
cmd.setFlags(update.getFlags().getValue());
}
if (update.getLifetime() != null) {
cmd.setLifetime(update.getLifetime().getValue());
}
if (update.getParameters() != null) {
cmd.setParameters(update.getParameters().getValue());
}
if (update.getResult() != null) {
cmd.setResult(update.getResult().getValue());
}
if (update.getStatus() != null) {
cmd.setStatus(update.getStatus().getValue());
}
if (update.getTimestamp() != null) {
cmd.setTimestamp(update.getTimestamp().getValue());
}
return cmd;
}
}
|
server/src/main/java/com/devicehive/service/DeviceCommandService.java
|
package com.devicehive.service;
import com.devicehive.auth.HivePrincipal;
import com.devicehive.dao.DeviceCommandDAO;
import com.devicehive.exceptions.HiveException;
import com.devicehive.messages.bus.GlobalMessageBus;
import com.devicehive.messages.handler.WebsocketHandlerCreator;
import com.devicehive.messages.subscriptions.CommandUpdateSubscription;
import com.devicehive.messages.subscriptions.SubscriptionManager;
import com.devicehive.model.*;
import com.devicehive.model.updates.DeviceCommandUpdate;
import com.devicehive.util.LogExecutionTime;
import com.devicehive.util.Timer;
import com.devicehive.websockets.util.AsyncMessageSupplier;
import com.devicehive.websockets.util.WebsocketSession;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.validation.constraints.NotNull;
import javax.websocket.Session;
import java.sql.Timestamp;
import java.util.List;
import static javax.ws.rs.core.Response.Status.NOT_FOUND;
import static javax.ws.rs.core.Response.Status.UNAUTHORIZED;
@Stateless
@LogExecutionTime
public class DeviceCommandService {
private DeviceCommandDAO commandDAO;
private DeviceCommandService self;
private GlobalMessageBus globalMessageBus;
private AsyncMessageSupplier asyncMessageDeliverer;
private SubscriptionManager subscriptionManager;
private DeviceService deviceService;
@EJB
public void setSubscriptionManager(SubscriptionManager subscriptionManager) {
this.subscriptionManager = subscriptionManager;
}
@EJB
public void setAsyncMessageDeliverer(AsyncMessageSupplier asyncMessageDeliverer) {
this.asyncMessageDeliverer = asyncMessageDeliverer;
}
@EJB
public void setGlobalMessageBus(GlobalMessageBus globalMessageBus) {
this.globalMessageBus = globalMessageBus;
}
@EJB
public void setSelf(DeviceCommandService self) {
this.self = self;
}
@EJB
public void setCommandDAO(DeviceCommandDAO commandDAO) {
this.commandDAO = commandDAO;
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getWithDevice(@NotNull long id) {
return commandDAO.getWithDevice(id);
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getWithDeviceAndUser(@NotNull long id) {
return commandDAO.getWithDeviceAndUser(id);
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand getByGuidAndId(@NotNull String guid, @NotNull long id) {
return commandDAO.getByDeviceGuidAndId(guid, id);
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public DeviceCommand findById(Long id) {
return commandDAO.findById(id);
}
@EJB
public void setDeviceService(DeviceService deviceService) {
this.deviceService = deviceService;
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public List<DeviceCommand> getDeviceCommandsList(@NotNull SubscriptionFilterInternal subscriptionFilter, HivePrincipal principal) {
if (subscriptionFilter.getDeviceNames() != null) {
return commandDAO.findCommands(deviceService.createFilterMap(subscriptionFilter.getDeviceNames(),principal), subscriptionFilter.getTimestamp(), null);
} else {
return commandDAO.findCommands(
subscriptionFilter.getTimestamp(),
subscriptionFilter.getNames(),
principal);
}
}
@TransactionAttribute(TransactionAttributeType.SUPPORTS)
public List<DeviceCommand> queryDeviceCommand(Device device, Timestamp start, Timestamp end, String command,
String status, String sortField, Boolean sortOrderAsc,
Integer take, Integer skip, Integer gridInterval) {
return commandDAO.queryDeviceCommand(device, start, end, command, status, sortField, sortOrderAsc, take,
skip, gridInterval);
}
public DeviceCommand getByDeviceGuidAndId(@NotNull String guid, @NotNull long id) {
return commandDAO.getByDeviceGuidAndId(guid, id);
}
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void submitDeviceCommandUpdate(DeviceCommandUpdate update, Device device) {
Timer timer = Timer.newInstance();
DeviceCommand saved = self.saveDeviceCommandUpdate(update, device);
timer.logMethodExecuted("DeviceCommandService.self.saveDeviceCommandUpdate");
globalMessageBus.publishDeviceCommandUpdate(saved);
}
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void submitDeviceCommand(DeviceCommand command, Device device, User user, final Session session) {
Timer timer = Timer.newInstance();
self.saveDeviceCommand(command, device, user, session);
timer.logMethodExecuted("DeviceCommandService.self.saveDeviceCommand");
globalMessageBus.publishDeviceCommand(command);
}
/**
 * Stores a new command for a device and, when a websocket session is present,
 * subscribes that session to updates of the freshly created command.
 *
 * @param command command to persist; its device and user are set here
 * @param device  target device, attached to the command before saving
 * @param user    issuing user, attached to the command before saving
 * @param session websocket session to receive command updates; null skips the
 *                subscription entirely
 */
public void saveDeviceCommand(final DeviceCommand command, Device device, User user, final Session session) {
    command.setDevice(device);
    command.setUser(user);
    // createCommand must run first: the subscription below keys on command.getId().
    commandDAO.createCommand(command);
    if (session != null) {
        // Cleanup hook: detach this session from the command's update storage
        // once the handler decides the subscription is finished.
        Runnable removeHandler = new Runnable() {
            @Override
            public void run() {
                subscriptionManager.getCommandUpdateSubscriptionStorage().remove(command.getId(), session.getId());
            }
        };
        // Deliver command updates to the websocket session, serialized on the
        // session's COMMAND_UPDATES_SUBSCRIPTION_LOCK.
        CommandUpdateSubscription commandUpdateSubscription =
                new CommandUpdateSubscription(command.getId(), session.getId(),
                        new WebsocketHandlerCreator(session, WebsocketSession.COMMAND_UPDATES_SUBSCRIPTION_LOCK,
                                asyncMessageDeliverer, removeHandler));
        subscriptionManager.getCommandUpdateSubscriptionStorage().insert(commandUpdateSubscription);
    }
}
/**
 * Applies a partial update submitted by a device to an existing command.
 * Only fields present in the update (non-null wrappers) overwrite the stored
 * command; absent fields are left untouched.
 *
 * @param update partial update; {@code update.getId()} identifies the command
 * @param device device claiming ownership of the command
 * @return the updated, managed command entity
 * @throws HiveException with NOT_FOUND when the command does not exist, or
 *                       when it belongs to a different device
 */
public DeviceCommand saveDeviceCommandUpdate(DeviceCommandUpdate update, Device device) {
    DeviceCommand cmd = commandDAO.findById(update.getId());
    if (cmd == null) {
        throw new HiveException("Command not found!", NOT_FOUND.getStatusCode());
    }
    if (!cmd.getDevice().getId().equals(device.getId())) {
        // NOT_FOUND instead of UNAUTHORIZED: answering 401 here would confirm
        // to a device that a command with this id exists for another device.
        throw new HiveException("Device tries to update incorrect command", NOT_FOUND.getStatusCode());
    }
    // Copy over only the fields the update actually carries.
    if (update.getCommand() != null) {
        cmd.setCommand(update.getCommand().getValue());
    }
    if (update.getFlags() != null) {
        cmd.setFlags(update.getFlags().getValue());
    }
    if (update.getLifetime() != null) {
        cmd.setLifetime(update.getLifetime().getValue());
    }
    if (update.getParameters() != null) {
        cmd.setParameters(update.getParameters().getValue());
    }
    if (update.getResult() != null) {
        cmd.setResult(update.getResult().getValue());
    }
    if (update.getStatus() != null) {
        cmd.setStatus(update.getStatus().getValue());
    }
    if (update.getTimestamp() != null) {
        cmd.setTimestamp(update.getTimestamp().getValue());
    }
    return cmd;
}
}
|
UNAUTHORIZED status code changed to NOT_FOUND when device.guid is not equal to command.device.guid.
|
server/src/main/java/com/devicehive/service/DeviceCommandService.java
|
UNAUTHORIZED status code changed to NOT_FOUND when device.guid is not equal to command.device.guid.
|
|
Java
|
apache-2.0
|
a54f8fde874e4d6083d34b874dd5cef8e1e4a90f
| 0
|
praveen1503/powermock,JiangYongGang/powermock,lincate/powermock,Willam2004/powermock,awenblue/powermock,mnky4a6/powermock,bsugitayasa/powermock,chembohuang/powermock,farmerjohngit/powermock,sujithps/powermock,573196010/powermock,priyambajaj215/powermock,amir1422/powermock,sumit-jaiswal/powermock,maidh91/powermock,Thoppan/powermock
|
/*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package samples.junit4.constructor;
import static org.easymock.EasyMock.createMock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import static org.powermock.api.easymock.PowerMock.createPartialMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.powermock.reflect.exceptions.ConstructorNotFoundException;
import samples.Service;
import samples.constructor.PublicConstructorWithDependencyDemo;
/**
 * Regression test: partial mocking must report a helpful error message when no
 * constructor matches the supplied arguments. Asserts that
 * http://code.google.com/p/powertest/issues/detail?id=59 has been fixed.
 */
public class PublicConstructorWithDependencyDemoTest {

    private Service serviceMock;

    @Before
    public void setUp() {
        serviceMock = createMock(Service.class);
    }

    @After
    public void tearDown() {
        serviceMock = null;
    }

    /**
     * A matching constructor exists, so the dependency is injected unchanged.
     */
    @Test
    public void testConstructorFound() throws Exception {
        final PublicConstructorWithDependencyDemo tested = createPartialMock(
                PublicConstructorWithDependencyDemo.class, new String[] { "aMethod" }, serviceMock);
        assertSame(serviceMock, tested.getService());
    }

    /**
     * No constructor takes (Service, String); the failure message must name
     * the class and the offending parameter types.
     */
    @Test
    public void testConstructorNotFound() throws Exception {
        try {
            createPartialMock(PublicConstructorWithDependencyDemo.class, new String[] { "aMethod" },
                    serviceMock, "bad argument");
            fail("Should throw ConstructorNotFoundException.");
        } catch (ConstructorNotFoundException e) {
            final String expected = "No constructor found in class '"
                    + PublicConstructorWithDependencyDemo.class.getName()
                    + "' with parameter types: [ " + Service.class.getName()
                    + ", " + String.class.getName() + " ].";
            assertEquals(expected, e.getMessage());
        }
    }
}
|
modules/module-test/powermock/junit4-test/src/test/java/samples/junit4/constructor/PublicConstructorWithDependencyDemoTest.java
|
/*
* Copyright 2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package samples.junit4.constructor;
import static org.easymock.EasyMock.createMock;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import static org.powermock.api.easymock.PowerMock.createPartialMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.powermock.reflect.exceptions.ConstructorNotFoundException;
import samples.Service;
import samples.constructor.PublicConstructorWithDependencyDemo;
/**
 * Verifies that error messages are correct when the constructor cannot be found
 * with partial mocking. This test asserts that the
 * http://code.google.com/p/powertest/issues/detail?id=59 has been fixed.
 *
 */
public class PublicConstructorWithDependencyDemoTest {
    private Service serviceMock;
    @Before
    public void setUp() {
        serviceMock = createMock(Service.class);
    }
    @After
    public void tearDown() {
        serviceMock = null;
    }
    /**
     * A matching constructor exists, so the dependency is injected unchanged.
     *
     * @throws Exception
     */
    @Test
    public void testConstructorFound() throws Exception {
        PublicConstructorWithDependencyDemo tested = createPartialMock(
                PublicConstructorWithDependencyDemo.class,
                new String[] { "aMethod" }, serviceMock);
        assertSame(serviceMock, tested.getService());
    }
    /**
     * No constructor takes (Service, String); the failure message must name
     * the class and the argument types.
     *
     * @throws Exception
     */
    @Test
    public void testConstructorNotFound() throws Exception {
        try {
            createPartialMock(PublicConstructorWithDependencyDemo.class,
                    new String[] { "aMethod" }, serviceMock, "bad argument");
            fail("Should throw ConstructorNotFoundException.");
        } catch (ConstructorNotFoundException e) {
            assertEquals("No constructor found in class '"
                    + PublicConstructorWithDependencyDemo.class.getName()
                    + "' with argument types: [ " + Service.class.getName()
                    + ", " + String.class.getName() + " ]", e.getMessage());
        }
    }
}
|
Updated test to reflect changes to the Whitebox error message
|
modules/module-test/powermock/junit4-test/src/test/java/samples/junit4/constructor/PublicConstructorWithDependencyDemoTest.java
|
Updated test to reflect changes to the Whitebox error message
|
|
Java
|
apache-2.0
|
a1d3a4c4f7a9f4f29d99aac53045762db48ea31e
| 0
|
dangerousloveni/GeoWind
|
package xx.com.geowind;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
/**
 * Entry-point activity; inflates the main layout on creation.
 */
public class MainActivity extends AppCompatActivity {

    /**
     * Initializes the activity and sets the main content view.
     * The unused local variable {@code int c} was removed (dead code).
     *
     * @param savedInstanceState previously saved state, or null on first launch
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
}
|
app/src/main/java/xx/com/geowind/MainActivity.java
|
package xx.com.geowind;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
/**
 * Entry-point activity; inflates the main layout on creation.
 */
public class MainActivity extends AppCompatActivity {

    /**
     * Initializes the activity and sets the main content view.
     * Seven unused local variables (c, p, a, d, aa, f, s) were removed —
     * they were never read or written and served no purpose.
     *
     * @param savedInstanceState previously saved state, or null on first launch
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
}
|
sssdsdfsdf
|
app/src/main/java/xx/com/geowind/MainActivity.java
|
sssdsdfsdf
|
|
Java
|
apache-2.0
|
925bf48ec0e3d1112814c047bd8dfb682cd1eb9c
| 0
|
rwinch/spring-security,djechelon/spring-security,rwinch/spring-security,fhanik/spring-security,jgrandja/spring-security,fhanik/spring-security,djechelon/spring-security,rwinch/spring-security,fhanik/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,spring-projects/spring-security,rwinch/spring-security,jgrandja/spring-security,rwinch/spring-security,fhanik/spring-security,jgrandja/spring-security,fhanik/spring-security,jgrandja/spring-security,spring-projects/spring-security,jgrandja/spring-security,jgrandja/spring-security,rwinch/spring-security,djechelon/spring-security,djechelon/spring-security,spring-projects/spring-security,djechelon/spring-security,spring-projects/spring-security,fhanik/spring-security
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers.oauth2.server.resource;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.security.KeyFactory;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.X509EncodedKeySpec;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Base64;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import javax.annotation.PreDestroy;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.hamcrest.core.AllOf;
import org.hamcrest.core.StringContains;
import org.hamcrest.core.StringEndsWith;
import org.hamcrest.core.StringStartsWith;
import org.junit.Rule;
import org.junit.Test;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.NoUniqueBeanDefinitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.ApplicationContext;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.AuthenticationManagerResolver;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.config.test.SpringTestRule;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.oauth2.core.DefaultOAuth2AuthenticatedPrincipal;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;
import org.springframework.security.oauth2.jwt.JwtDecoder;
import org.springframework.security.oauth2.jwt.JwtException;
import org.springframework.security.oauth2.jwt.JwtTimestampValidator;
import org.springframework.security.oauth2.jwt.NimbusJwtDecoder;
import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthentication;
import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter;
import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken;
import org.springframework.security.oauth2.server.resource.introspection.NimbusOpaqueTokenIntrospector;
import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
import org.springframework.security.oauth2.server.resource.web.BearerTokenAuthenticationEntryPoint;
import org.springframework.security.oauth2.server.resource.web.BearerTokenResolver;
import org.springframework.security.oauth2.server.resource.web.DefaultBearerTokenResolver;
import org.springframework.security.oauth2.server.resource.web.access.BearerTokenAccessDeniedHandler;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.security.web.access.AccessDeniedHandler;
import org.springframework.security.web.access.AccessDeniedHandlerImpl;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.ResultMatcher;
import org.springframework.test.web.servlet.request.RequestPostProcessor;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestOperations;
import org.springframework.web.context.support.GenericWebApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.core.StringStartsWith.startsWith;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.security.config.Customizer.withDefaults;
import static org.springframework.security.oauth2.core.TestOAuth2AccessTokens.noScopes;
import static org.springframework.security.oauth2.jwt.NimbusJwtDecoder.withJwkSetUri;
import static org.springframework.security.oauth2.jwt.NimbusJwtDecoder.withPublicKey;
import static org.springframework.security.oauth2.jwt.TestJwts.jwt;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
/**
 * Tests for {@link OAuth2ResourceServerConfigurer}
 *
 * @author Josh Cummings
 */
public class OAuth2ResourceServerConfigurerTests {
    // Shared JWT fixtures used across the test methods below.
    private static final String JWT_TOKEN = "token";
    private static final String JWT_SUBJECT = "mock-test-subject";
    private static final Map<String, Object> JWT_CLAIMS = Collections.singletonMap(JwtClaimNames.SUB, JWT_SUBJECT);
    private static final Jwt JWT = jwt().build();
    private static final String JWK_SET_URI = "https://mock.org";
    private static final JwtAuthenticationToken JWT_AUTHENTICATION_TOKEN =
            new JwtAuthenticationToken(JWT, Collections.emptyList());
    // Fixtures for opaque-token introspection tests.
    private static final String INTROSPECTION_URI = "https://idp.example.com";
    private static final String CLIENT_ID = "client-id";
    private static final String CLIENT_SECRET = "client-secret";
    private static final BearerTokenAuthentication INTROSPECTION_AUTHENTICATION_TOKEN =
            new BearerTokenAuthentication(new DefaultOAuth2AuthenticatedPrincipal(JWT_CLAIMS, Collections.emptyList()),
                    noScopes(), Collections.emptyList());
    // Populated only when the registered configuration defines them.
    @Autowired(required = false)
    MockMvc mvc;
    @Autowired(required = false)
    MockWebServer web;
    // Boots the per-test Spring context registered via spring.register(...).
    @Rule
    public final SpringTestRule spring = new SpringTestRule();
// -- Default JWT configuration: happy paths, rejection cases, and scope mapping.
@Test
public void getWhenUsingDefaultsWithValidBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
@Test
public void getWhenUsingDefaultsInLambdaWithValidBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultInLambdaConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
@Test
public void getWhenUsingJwkSetUriThenAcceptsRequest() throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriConfig.class, BasicController.class).autowire();
    mockWebServer(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
@Test
public void getWhenUsingJwkSetUriInLambdaThenAcceptsRequest() throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriInLambdaConfig.class, BasicController.class).autowire();
    mockWebServer(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
// Rejection cases: each should yield 401 with an invalid_token WWW-Authenticate detail.
@Test
public void getWhenUsingDefaultsWithExpiredBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("Expired");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
@Test
public void getWhenUsingDefaultsWithBadJwkEndpointThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations("malformed");
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt: Malformed Jwk set"));
}
@Test
public void getWhenUsingDefaultsWithUnavailableJwkEndpointThenInvalidToken()
        throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriConfig.class).autowire();
    // Shut the mock JWK server down so key resolution fails at request time.
    this.web.shutdown();
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
@Test
public void getWhenUsingDefaultsWithMalformedBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/").with(bearerToken("an\"invalid\"token")))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("Bearer token is malformed"));
}
@Test
public void getWhenUsingDefaultsWithMalformedPayloadThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("MalformedPayload");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt: Malformed payload"));
}
@Test
public void getWhenUsingDefaultsWithUnsignedBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    String token = this.token("Unsigned");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("Unsupported algorithm of none"));
}
@Test
public void getWhenUsingDefaultsWithBearerTokenBeforeNotBeforeThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    this.mockRestOperations(jwks("Default"));
    String token = this.token("TooEarly");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
// RFC 6750: supplying the token in more than one place is an invalid_request.
@Test
public void getWhenUsingDefaultsWithBearerTokenInTwoPlacesThenInvalidRequest()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/")
            .with(bearerToken("token"))
            .with(bearerToken("token").asParam()))
            .andExpect(status().isBadRequest())
            .andExpect(invalidRequestHeader("Found multiple bearer tokens in the request"));
}
@Test
public void getWhenUsingDefaultsWithBearerTokenInTwoParametersThenInvalidRequest()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    MultiValueMap<String, String> params = new LinkedMultiValueMap<>();
    params.add("access_token", "token1");
    params.add("access_token", "token2");
    this.mvc.perform(get("/")
            .params(params))
            .andExpect(status().isBadRequest())
            .andExpect(invalidRequestHeader("Found multiple bearer tokens in the request"));
}
// Tokens sent as a POST form parameter are ignored by the default resolver.
@Test
public void postWhenUsingDefaultsWithBearerTokenAsFormParameterThenIgnoresToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(post("/") // engage csrf
            .with(bearerToken("token").asParam()))
            .andExpect(status().isForbidden())
            .andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
}
@Test
public void postWhenCsrfDisabledWithBearerTokenAsFormParameterThenIgnoresToken()
        throws Exception {
    this.spring.register(CsrfDisabledConfig.class).autowire();
    this.mvc.perform(post("/")
            .with(bearerToken("token").asParam()))
            .andExpect(status().isUnauthorized())
            .andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer"));
}
@Test
public void getWhenUsingDefaultsWithNoBearerTokenThenUnauthorized()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/"))
            .andExpect(status().isUnauthorized())
            .andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer"));
}
// Scope handling: both "scope" and "scp" claims map to SCOPE_ authorities.
@Test
public void getWhenUsingDefaultsWithSufficientlyScopedBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageReadScope");
    this.mvc.perform(get("/requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("[SCOPE_message:read]"));
}
@Test
public void getWhenUsingDefaultsWithInsufficientScopeThenInsufficientScopeError()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isForbidden())
            .andExpect(insufficientScopeHeader());
}
@Test
public void getWhenUsingDefaultsWithInsufficientScpThenInsufficientScopeError()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageWriteScp");
    this.mvc.perform(get("/requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isForbidden())
            .andExpect(insufficientScopeHeader());
}
// Key selection against the JWK set: none, several, and kid-matched keys.
@Test
public void getWhenUsingDefaultsAndAuthorizationServerHasNoMatchingKeyThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations(jwks("Empty"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/")
            .with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
@Test
public void getWhenUsingDefaultsAndAuthorizationServerHasMultipleMatchingKeysThenOk()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("TwoKeys"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/authenticated")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("test-subject"));
}
@Test
public void getWhenUsingDefaultsAndKeyMatchesByKidThenOk()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("TwoKeys"));
    String token = this.token("Kid");
    this.mvc.perform(get("/authenticated")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("test-subject"));
}
// -- Method Security
// Scope authorities must flow through to @PreAuthorize-protected endpoints.
@Test
public void getWhenUsingMethodSecurityWithValidBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageReadScope");
    this.mvc.perform(get("/ms-requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("[SCOPE_message:read]"));
}
@Test
public void getWhenUsingMethodSecurityWithValidBearerTokenHavingScpAttributeThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageReadScp");
    this.mvc.perform(get("/ms-requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("[SCOPE_message:read]"));
}
@Test
public void getWhenUsingMethodSecurityWithInsufficientScopeThenInsufficientScopeError()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/ms-requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isForbidden())
            .andExpect(insufficientScopeHeader());
}
@Test
public void getWhenUsingMethodSecurityWithInsufficientScpThenInsufficientScopeError()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageWriteScp");
    this.mvc.perform(get("/ms-requires-read-scope")
            .with(bearerToken(token)))
            .andExpect(status().isForbidden())
            .andExpect(insufficientScopeHeader());
}
@Test
public void getWhenUsingMethodSecurityWithDenyAllThenInsufficientScopeError()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidMessageReadScope");
    this.mvc.perform(get("/ms-deny")
            .with(bearerToken(token)))
            .andExpect(status().isForbidden())
            .andExpect(insufficientScopeHeader());
}
// -- Resource Server should not engage csrf
// A valid bearer token must bypass CSRF protection; its absence must not.
@Test
public void postWhenUsingDefaultsWithValidBearerTokenAndNoCsrfTokenThenOk()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(post("/authenticated")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("test-subject"));
}
@Test
public void postWhenUsingDefaultsWithNoBearerTokenThenCsrfDenies()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(post("/authenticated"))
            .andExpect(status().isForbidden())
            .andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
}
@Test
public void postWhenUsingDefaultsWithExpiredBearerTokenAndNoCsrfThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("Expired");
    this.mvc.perform(post("/authenticated")
            .with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
// -- Resource Server should not create sessions
// Authenticated requests must stay stateless; user config may override this.
@Test
public void requestWhenDefaultConfiguredThenSessionIsNotCreated()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    MvcResult result = this.mvc.perform(get("/")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andReturn();
    assertThat(result.getRequest().getSession(false)).isNull();
}
@Test
public void requestWhenIntrospectionConfiguredThenSessionIsNotCreated()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class, BasicController.class).autowire();
    mockRestOperations(json("Active"));
    MvcResult result = this.mvc.perform(get("/authenticated")
            .with(bearerToken("token")))
            .andExpect(status().isOk())
            .andExpect(content().string("test-subject"))
            .andReturn();
    assertThat(result.getRequest().getSession(false)).isNull();
}
@Test
public void requestWhenUsingDefaultsAndNoBearerTokenThenSessionIsCreated()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class, BasicController.class).autowire();
    MvcResult result = this.mvc.perform(get("/"))
            .andExpect(status().isUnauthorized())
            .andReturn();
    assertThat(result.getRequest().getSession(false)).isNotNull();
}
@Test
public void requestWhenSessionManagementConfiguredThenUserConfigurationOverrides()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, AlwaysSessionCreationConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    MvcResult result = this.mvc.perform(get("/")
            .with(bearerToken(token)))
            .andExpect(status().isOk())
            .andReturn();
    assertThat(result.getRequest().getSession(false)).isNotNull();
}
// -- custom bearer token resolver
// With a resolver that allows request-body tokens, the token is accepted both
// from the Authorization header and from the access_token form parameter.
@Test
public void requestWhenBearerTokenResolverAllowsRequestBodyThenEitherHeaderOrRequestBodyIsAccepted()
throws Exception {
this.spring.register(AllowBearerTokenInRequestBodyConfig.class, JwtDecoderConfig.class,
BasicController.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
this.mvc.perform(post("/authenticated")
.param("access_token", JWT_TOKEN))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
}
// With a resolver that allows query-parameter tokens, the token is accepted both
// from the Authorization header and from the access_token query parameter.
@Test
public void requestWhenBearerTokenResolverAllowsQueryParameterThenEitherHeaderOrQueryParameterIsAccepted()
throws Exception {
this.spring.register(AllowBearerTokenAsQueryParameterConfig.class, JwtDecoderConfig.class,
BasicController.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
this.mvc.perform(get("/authenticated")
.param("access_token", JWT_TOKEN))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
}
// Supplying the token in two places at once (body + header) is an invalid
// request per RFC 6750, even when both carry the same token.
@Test
public void requestWhenBearerTokenResolverAllowsRequestBodyAndRequestContainsTwoTokensThenInvalidRequest()
throws Exception {
this.spring.register(AllowBearerTokenInRequestBodyConfig.class, JwtDecoderConfig.class,
BasicController.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(post("/authenticated")
.param("access_token", JWT_TOKEN)
.with(bearerToken(JWT_TOKEN))
.with(csrf()))
.andExpect(status().isBadRequest())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("invalid_request")));
}
// Same duplicate-token rule for header + query parameter.
@Test
public void requestWhenBearerTokenResolverAllowsQueryParameterAndRequestContainsTwoTokensThenInvalidRequest()
throws Exception {
this.spring.register(AllowBearerTokenAsQueryParameterConfig.class, JwtDecoderConfig.class,
BasicController.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN))
.param("access_token", JWT_TOKEN))
.andExpect(status().isBadRequest())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("invalid_request")));
}
// A resolver wired on the DSL wins even when multiple resolver beans exist
// (both bean registrations deliberately share the same mock instance).
@Test
public void getBearerTokenResolverWhenDuplicateResolverBeansAndAnotherOnTheDslThenTheDslOneIsUsed() {
BearerTokenResolver resolverBean = mock(BearerTokenResolver.class);
BearerTokenResolver resolver = mock(BearerTokenResolver.class);
GenericWebApplicationContext context = new GenericWebApplicationContext();
context.registerBean("resolverOne", BearerTokenResolver.class, () -> resolverBean);
context.registerBean("resolverTwo", BearerTokenResolver.class, () -> resolverBean);
this.spring.context(context).autowire();
OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
oauth2.bearerTokenResolver(resolver);
assertThat(oauth2.getBearerTokenResolver()).isEqualTo(resolver);
}
// Two resolver beans and nothing on the DSL is ambiguous and fails wiring.
@Test
public void getBearerTokenResolverWhenDuplicateResolverBeansThenWiringException() {
assertThatCode(() -> this.spring.register(MultipleBearerTokenResolverBeansConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasRootCauseInstanceOf(NoUniqueBeanDefinitionException.class);
}
// A single resolver bean is still trumped by one wired directly on the DSL.
@Test
public void getBearerTokenResolverWhenResolverBeanAndAnotherOnTheDslThenTheDslOneIsUsed() {
BearerTokenResolver resolver = mock(BearerTokenResolver.class);
BearerTokenResolver resolverBean = mock(BearerTokenResolver.class);
GenericWebApplicationContext context = new GenericWebApplicationContext();
context.registerBean(BearerTokenResolver.class, () -> resolverBean);
this.spring.context(context).autowire();
OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
oauth2.bearerTokenResolver(resolver);
assertThat(oauth2.getBearerTokenResolver()).isEqualTo(resolver);
}
// With no resolver configured anywhere, the built-in default resolver is used.
@Test
public void getBearerTokenResolverWhenNoResolverSpecifiedThenTheDefaultIsUsed() {
ApplicationContext context =
this.spring.context(new GenericWebApplicationContext()).getContext();
OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
assertThat(oauth2.getBearerTokenResolver()).isInstanceOf(DefaultBearerTokenResolver.class);
}
// -- custom jwt decoder
// A JwtDecoder supplied through the classic DSL (.jwt().decoder(...)) is used.
@Test
public void requestWhenCustomJwtDecoderWiredOnDslThenUsed()
throws Exception {
this.spring.register(CustomJwtDecoderOnDsl.class, BasicController.class).autowire();
CustomJwtDecoderOnDsl config = this.spring.getContext().getBean(CustomJwtDecoderOnDsl.class);
JwtDecoder decoder = config.decoder();
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
}
// Same as above, but the decoder is supplied via the lambda-style DSL.
@Test
public void requestWhenCustomJwtDecoderInLambdaOnDslThenUsed()
throws Exception {
this.spring.register(CustomJwtDecoderInLambdaOnDsl.class, BasicController.class).autowire();
CustomJwtDecoderInLambdaOnDsl config = this.spring.getContext().getBean(CustomJwtDecoderInLambdaOnDsl.class);
JwtDecoder decoder = config.decoder();
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
}
// A JwtDecoder exposed as an application-context bean is picked up automatically.
@Test
public void requestWhenCustomJwtDecoderExposedAsBeanThenUsed()
throws Exception {
this.spring.register(CustomJwtDecoderAsBean.class, BasicController.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk())
.andExpect(content().string(JWT_SUBJECT));
}
// When both decoder(...) and jwkSetUri(...) are configured, whichever was
// called LAST determines the decoder that is actually used.
@Test
public void getJwtDecoderWhenConfiguredWithDecoderAndJwkSetUriThenLastOneWins() {
ApplicationContext context = mock(ApplicationContext.class);
OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
JwtDecoder decoder = mock(JwtDecoder.class);
jwtConfigurer.jwkSetUri(JWK_SET_URI);
jwtConfigurer.decoder(decoder);
assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
// reverse the call order: jwkSetUri last -> a Nimbus decoder is built instead
jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
jwtConfigurer.decoder(decoder);
jwtConfigurer.jwkSetUri(JWK_SET_URI);
assertThat(jwtConfigurer.getJwtDecoder()).isInstanceOf(NimbusJwtDecoder.class);
}
// A decoder wired on the DSL takes precedence over a JwtDecoder bean.
@Test
public void getJwtDecoderWhenConflictingJwtDecodersThenTheDslWiredOneTakesPrecedence() {
JwtDecoder decoderBean = mock(JwtDecoder.class);
JwtDecoder decoder = mock(JwtDecoder.class);
ApplicationContext context = mock(ApplicationContext.class);
when(context.getBean(JwtDecoder.class)).thenReturn(decoderBean);
OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
jwtConfigurer.decoder(decoder);
assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
}
// A user-configured jwkSetUri also takes precedence over a JwtDecoder bean.
@Test
public void getJwtDecoderWhenContextHasBeanAndUserConfiguresJwkSetUriThenJwkSetUriTakesPrecedence() {
JwtDecoder decoder = mock(JwtDecoder.class);
ApplicationContext context = mock(ApplicationContext.class);
when(context.getBean(JwtDecoder.class)).thenReturn(decoder);
OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
jwtConfigurer.jwkSetUri(JWK_SET_URI);
assertThat(jwtConfigurer.getJwtDecoder()).isNotEqualTo(decoder);
assertThat(jwtConfigurer.getJwtDecoder()).isInstanceOf(NimbusJwtDecoder.class);
}
// A DSL-wired decoder wins even when two decoder beans exist (both bean
// registrations deliberately share the same mock instance).
@Test
public void getJwtDecoderWhenTwoJwtDecoderBeansAndAnotherWiredOnDslThenDslWiredOneTakesPrecedence() {
JwtDecoder decoderBean = mock(JwtDecoder.class);
JwtDecoder decoder = mock(JwtDecoder.class);
GenericWebApplicationContext context = new GenericWebApplicationContext();
context.registerBean("decoderOne", JwtDecoder.class, () -> decoderBean);
context.registerBean("decoderTwo", JwtDecoder.class, () -> decoderBean);
this.spring.context(context).autowire();
OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
jwtConfigurer.decoder(decoder);
assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
}
// Two decoder beans and nothing on the DSL is ambiguous: lookup fails.
@Test
public void getJwtDecoderWhenTwoJwtDecoderBeansThenThrowsException() {
JwtDecoder decoder = mock(JwtDecoder.class);
GenericWebApplicationContext context = new GenericWebApplicationContext();
context.registerBean("decoderOne", JwtDecoder.class, () -> decoder);
context.registerBean("decoderTwo", JwtDecoder.class, () -> decoder);
this.spring.context(context).autowire();
OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
new OAuth2ResourceServerConfigurer(context).jwt();
assertThatCode(() -> jwtConfigurer.getJwtDecoder())
.isInstanceOf(NoUniqueBeanDefinitionException.class);
}
// -- exception handling
// A custom entry point carrying a realm name is used for 401 responses.
@Test
public void requestWhenRealmNameConfiguredThenUsesOnUnauthenticated()
throws Exception {
this.spring.register(RealmNameConfiguredOnEntryPoint.class, JwtDecoderConfig.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenThrow(JwtException.class);
this.mvc.perform(get("/authenticated")
.with(bearerToken("invalid_token")))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer realm=\"myRealm\"")));
}
// A custom access-denied handler carrying a realm name is used for 403 responses.
@Test
public void requestWhenRealmNameConfiguredThenUsesOnAccessDenied()
throws Exception {
this.spring.register(RealmNameConfiguredOnAccessDeniedHandler.class, JwtDecoderConfig.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(bearerToken("insufficiently_scoped")))
.andExpect(status().isForbidden())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer realm=\"myRealm\"")));
}
// Null guards: the configurer rejects a null entry point ...
@Test
public void authenticationEntryPointWhenGivenNullThenThrowsException() {
ApplicationContext context = mock(ApplicationContext.class);
OAuth2ResourceServerConfigurer configurer = new OAuth2ResourceServerConfigurer(context);
assertThatCode(() -> configurer.authenticationEntryPoint(null))
.isInstanceOf(IllegalArgumentException.class);
}
// ... and a null access-denied handler.
@Test
public void accessDeniedHandlerWhenGivenNullThenThrowsException() {
ApplicationContext context = mock(ApplicationContext.class);
OAuth2ResourceServerConfigurer configurer = new OAuth2ResourceServerConfigurer(context);
assertThatCode(() -> configurer.accessDeniedHandler(null))
.isInstanceOf(IllegalArgumentException.class);
}
// -- token validator
// A failing custom OAuth2TokenValidator surfaces its error description in the
// WWW-Authenticate header of the 401 response.
@Test
public void requestWhenCustomJwtValidatorFailsThenCorrespondingErrorMessage()
throws Exception {
this.spring.register(RestOperationsConfig.class, CustomJwtValidatorConfig.class).autowire();
mockRestOperations(jwks("Default"));
String token = this.token("ValidNoScopes");
OAuth2TokenValidator<Jwt> jwtValidator =
this.spring.getContext().getBean(CustomJwtValidatorConfig.class)
.getJwtValidator();
OAuth2Error error = new OAuth2Error("custom-error", "custom-description", "custom-uri");
when(jwtValidator.validate(any(Jwt.class))).thenReturn(OAuth2TokenValidatorResult.failure(error));
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("custom-description")));
}
// A configured clock skew widens the expiry window, so this (otherwise
// expired) fixture token is still accepted.
@Test
public void requestWhenClockSkewSetThenTimestampWindowRelaxedAccordingly()
throws Exception {
this.spring.register(RestOperationsConfig.class, UnexpiredJwtClockSkewConfig.class, BasicController.class).autowire();
mockRestOperations(jwks("Default"));
String token = this.token("ExpiresAt4687177990");
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(status().isOk());
}
// The same fixture token is still rejected when it expires beyond even the
// configured skew.
@Test
public void requestWhenClockSkewSetButJwtStillTooLateThenReportsExpired()
throws Exception {
this.spring.register(RestOperationsConfig.class, ExpiredJwtClockSkewConfig.class, BasicController.class).autowire();
mockRestOperations(jwks("Default"));
String token = this.token("ExpiresAt4687177990");
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(status().isUnauthorized())
.andExpect(invalidTokenHeader("Jwt expired at"));
}
// -- converter
// A Jwt->JwtAuthenticationToken converter wired on the DSL is invoked for each
// decoded JWT (verified via the mock interaction).
@Test
public void requestWhenJwtAuthenticationConverterConfiguredOnDslThenIsUsed()
throws Exception {
this.spring.register(JwtDecoderConfig.class, JwtAuthenticationConverterConfiguredOnDsl.class,
BasicController.class).autowire();
Converter<Jwt, JwtAuthenticationToken> jwtAuthenticationConverter =
this.spring.getContext().getBean(JwtAuthenticationConverterConfiguredOnDsl.class)
.getJwtAuthenticationConverter();
when(jwtAuthenticationConverter.convert(JWT)).thenReturn(JWT_AUTHENTICATION_TOKEN);
JwtDecoder jwtDecoder = this.spring.getContext().getBean(JwtDecoder.class);
when(jwtDecoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk());
verify(jwtAuthenticationConverter).convert(JWT);
}
// Authorities produced by a customized converter (here: "message:read") are
// what authorization decisions see.
@Test
public void requestWhenJwtAuthenticationConverterCustomizedAuthoritiesThenThoseAuthoritiesArePropagated()
throws Exception {
this.spring.register(JwtDecoderConfig.class, CustomAuthorityMappingConfig.class, BasicController.class)
.autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(JWT_TOKEN)).thenReturn(JWT);
this.mvc.perform(get("/requires-read-scope")
.with(bearerToken(JWT_TOKEN)))
.andExpect(status().isOk());
}
// -- single key
// A token signed by the configured single public key authenticates.
@Test
public void requestWhenUsingPublicKeyAndValidTokenThenAuthenticates()
throws Exception {
this.spring.register(SingleKeyConfig.class, BasicController.class).autowire();
String token = this.token("ValidNoScopes");
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(status().isOk());
}
// A token with a bad signature is rejected with a signature-related error.
@Test
public void requestWhenUsingPublicKeyAndSignatureFailsThenReturnsInvalidToken()
throws Exception {
this.spring.register(SingleKeyConfig.class).autowire();
String token = this.token("WrongSignature");
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(invalidTokenHeader("signature"));
}
// A token signed with a different algorithm than the key expects is rejected.
@Test
public void requestWhenUsingPublicKeyAlgorithmDoesNotMatchThenReturnsInvalidToken()
throws Exception {
this.spring.register(SingleKeyConfig.class).autowire();
String token = this.token("WrongAlgorithm");
this.mvc.perform(get("/")
.with(bearerToken(token)))
.andExpect(invalidTokenHeader("algorithm"));
}
// A custom AuthenticationManager supplied for JWT processing is the one that
// authenticates the request (verified via the mocked provider).
@Test
public void getWhenCustomJwtAuthenticationManagerThenUsed() throws Exception {
this.spring.register(JwtAuthenticationManagerConfig.class, BasicController.class).autowire();
when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
.thenReturn(JWT_AUTHENTICATION_TOKEN);
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isOk())
.andExpect(content().string("mock-test-subject"));
verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
}
// -- opaque
// Happy path: an active introspection response authenticates the request.
@Test
public void getWhenIntrospectingThenOk() throws Exception {
this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class, BasicController.class).autowire();
mockRestOperations(json("Active"));
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isOk())
.andExpect(content().string("test-subject"));
}
// Same happy path through the lambda-style opaqueToken DSL.
@Test
public void getWhenOpaqueTokenInLambdaAndIntrospectingThenOk() throws Exception {
this.spring.register(RestOperationsConfig.class, OpaqueTokenInLambdaConfig.class, BasicController.class).autowire();
mockRestOperations(json("Active"));
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isOk())
.andExpect(content().string("test-subject"));
}
// An inactive introspection response yields 401 with an explanatory header.
@Test
public void getWhenIntrospectionFailsThenUnauthorized() throws Exception {
this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class).autowire();
mockRestOperations(json("Inactive"));
this.mvc.perform(get("/")
.with(bearerToken("token")))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE,
containsString("Provided token [token] isn't active")));
}
// An active token that lacks the required scope yields 403 with a scope hint.
@Test
public void getWhenIntrospectionLacksScopeThenForbidden() throws Exception {
this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class).autowire();
mockRestOperations(json("ActiveNoScopes"));
this.mvc.perform(get("/requires-read-scope")
.with(bearerToken("token")))
.andExpect(status().isForbidden())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("scope")));
}
// A custom AuthenticationManager supplied for introspection is used (classic DSL).
@Test
public void getWhenCustomIntrospectionAuthenticationManagerThenUsed() throws Exception {
this.spring.register(OpaqueTokenAuthenticationManagerConfig.class, BasicController.class).autowire();
when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
.thenReturn(INTROSPECTION_AUTHENTICATION_TOKEN);
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isOk())
.andExpect(content().string("mock-test-subject"));
verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
}
// Same custom AuthenticationManager behavior through the lambda-style DSL.
@Test
public void getWhenCustomIntrospectionAuthenticationManagerInLambdaThenUsed() throws Exception {
this.spring.register(OpaqueTokenAuthenticationManagerInLambdaConfig.class, BasicController.class).autowire();
when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
.thenReturn(INTROSPECTION_AUTHENTICATION_TOKEN);
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isOk())
.andExpect(content().string("mock-test-subject"));
verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
}
// Configuring only the introspection URI (no client credentials) is incomplete
// and must fail at wiring time.
@Test
public void configureWhenOnlyIntrospectionUrlThenException() {
assertThatCode(() -> this.spring.register(OpaqueTokenHalfConfiguredConfig.class).autowire())
.isInstanceOf(BeanCreationException.class);
}
// As with JwtConfigurer, whichever of introspector(...)/introspectionUri(...)
// is called LAST wins.
@Test
public void getIntrospectionClientWhenConfiguredWithClientAndIntrospectionUriThenLastOneWins() {
ApplicationContext context = mock(ApplicationContext.class);
OAuth2ResourceServerConfigurer.OpaqueTokenConfigurer opaqueTokenConfigurer =
new OAuth2ResourceServerConfigurer(context).opaqueToken();
OpaqueTokenIntrospector client = mock(OpaqueTokenIntrospector.class);
opaqueTokenConfigurer.introspectionUri(INTROSPECTION_URI);
opaqueTokenConfigurer.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
opaqueTokenConfigurer.introspector(client);
assertThat(opaqueTokenConfigurer.getIntrospector()).isEqualTo(client);
// reverse the order: introspectionUri last -> a Nimbus introspector is built
opaqueTokenConfigurer =
new OAuth2ResourceServerConfigurer(context).opaqueToken();
opaqueTokenConfigurer.introspector(client);
opaqueTokenConfigurer.introspectionUri(INTROSPECTION_URI);
opaqueTokenConfigurer.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
assertThat(opaqueTokenConfigurer.getIntrospector())
.isInstanceOf(NimbusOpaqueTokenIntrospector.class);
}
// DSL configuration still resolves an introspector even when multiple
// introspector beans are registered.
@Test
public void getIntrospectionClientWhenDslAndBeanWiredThenDslTakesPrecedence() {
GenericApplicationContext context = new GenericApplicationContext();
registerMockBean(context, "introspectionClientOne", OpaqueTokenIntrospector.class);
registerMockBean(context, "introspectionClientTwo", OpaqueTokenIntrospector.class);
OAuth2ResourceServerConfigurer.OpaqueTokenConfigurer opaqueToken =
new OAuth2ResourceServerConfigurer(context).opaqueToken();
opaqueToken.introspectionUri(INTROSPECTION_URI);
opaqueToken.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
assertThat(opaqueToken.getIntrospector()).isNotNull();
}
// -- In combination with other authentication providers
// With HTTP Basic and the resource server both configured, the challenge type
// in the 401 response matches the credential the client attempted (or Basic
// when none was presented).
@Test
public void requestWhenBasicAndResourceServerEntryPointsThenMatchedByRequest()
throws Exception {
this.spring.register(BasicAndResourceServerConfig.class, JwtDecoderConfig.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenThrow(JwtException.class);
this.mvc.perform(get("/authenticated")
.with(httpBasic("some", "user")))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Basic")));
this.mvc.perform(get("/authenticated"))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Basic")));
this.mvc.perform(get("/authenticated")
.with(bearerToken("invalid_token")))
.andExpect(status().isUnauthorized())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer")));
}
// With form login and the resource server both configured, a session is only
// created for the form-login (redirect) path, not for bearer-token requests.
@Test
public void requestWhenFormLoginAndResourceServerEntryPointsThenSessionCreatedByRequest()
throws Exception {
this.spring.register(FormAndResourceServerConfig.class, JwtDecoderConfig.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenThrow(JwtException.class);
MvcResult result =
this.mvc.perform(get("/authenticated"))
.andExpect(status().isFound())
.andExpect(redirectedUrl("http://localhost/login"))
.andReturn();
assertThat(result.getRequest().getSession(false)).isNotNull();
result =
this.mvc.perform(get("/authenticated")
.with(bearerToken("token")))
.andExpect(status().isUnauthorized())
.andReturn();
assertThat(result.getRequest().getSession(false)).isNull();
}
// Access-denied handling is likewise matched by request: Basic-authenticated
// denials use the default handler (no challenge header), bearer denials use
// the resource server's handler (Bearer challenge).
@Test
public void requestWhenDefaultAndResourceServerAccessDeniedHandlersThenMatchedByRequest()
throws Exception {
this.spring.register(ExceptionHandlingAndResourceServerWithAccessDeniedHandlerConfig.class,
JwtDecoderConfig.class).autowire();
JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
when(decoder.decode(anyString())).thenReturn(JWT);
this.mvc.perform(get("/authenticated")
.with(httpBasic("basic-user", "basic-password")))
.andExpect(status().isForbidden())
.andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
this.mvc.perform(get("/authenticated")
.with(bearerToken("insufficiently_scoped")))
.andExpect(status().isForbidden())
.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer")));
}
// Each credential type is handled by its own provider: the bearer token yields
// the JWT subject, the Basic credentials yield the in-memory user.
@Test
public void getWhenAlsoUsingHttpBasicThenCorrectProviderEngages()
throws Exception {
this.spring.register(RestOperationsConfig.class, BasicAndResourceServerConfig.class, BasicController.class).autowire();
mockRestOperations(jwks("Default"));
String token = this.token("ValidNoScopes");
this.mvc.perform(get("/authenticated")
.with(bearerToken(token)))
.andExpect(status().isOk())
.andExpect(content().string("test-subject"));
this.mvc.perform(get("/authenticated")
.with(httpBasic("basic-user", "basic-password")))
.andExpect(status().isOk())
.andExpect(content().string("basic-user"));
}
// -- authentication manager
// An AuthenticationManager configured on either the jwt() or opaqueToken() DSL
// is returned as-is, and no AuthenticationProvider is registered on the
// HttpSecurity builder in that case.
@Test
public void getAuthenticationManagerWhenConfiguredAuthenticationManagerThenTakesPrecedence() {
ApplicationContext context = mock(ApplicationContext.class);
HttpSecurityBuilder http = mock(HttpSecurityBuilder.class);
OAuth2ResourceServerConfigurer oauth2ResourceServer = new OAuth2ResourceServerConfigurer(context);
AuthenticationManager authenticationManager = mock(AuthenticationManager.class);
oauth2ResourceServer
.jwt()
.authenticationManager(authenticationManager)
.decoder(mock(JwtDecoder.class));
assertThat(oauth2ResourceServer.getAuthenticationManager(http)).isSameAs(authenticationManager);
// same precedence rule for the opaque-token path
oauth2ResourceServer = new OAuth2ResourceServerConfigurer(context);
oauth2ResourceServer
.opaqueToken()
.authenticationManager(authenticationManager)
.introspector(mock(OpaqueTokenIntrospector.class));
assertThat(oauth2ResourceServer.getAuthenticationManager(http)).isSameAs(authenticationManager);
verify(http, never()).authenticationProvider(any(AuthenticationProvider.class));
}
// -- Incorrect Configuration
// oauth2ResourceServer() with neither jwt() nor opaqueToken() fails wiring.
@Test
public void configuredWhenMissingJwtAuthenticationProviderThenWiringException() {
assertThatCode(() -> this.spring.register(JwtlessConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("neither was found");
}
// jwt() without any key material (decoder/jwkSetUri/bean) fails wiring.
@Test
public void configureWhenMissingJwkSetUriThenWiringException() {
assertThatCode(() -> this.spring.register(JwtHalfConfiguredConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("No qualifying bean of type");
}
// jwt() and opaqueToken() are mutually exclusive.
@Test
public void configureWhenUsingBothJwtAndOpaqueThenWiringException() {
assertThatCode(() -> this.spring.register(OpaqueAndJwtConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("Spring Security only supports JWTs or Opaque Tokens");
}
// An authenticationManagerResolver cannot be combined with the token-specific DSLs.
@Test
public void configureWhenUsingBothAuthenticationManagerResolverAndOpaqueThenWiringException() {
assertThatCode(() -> this.spring.register(AuthenticationManagerResolverPlusOtherConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("authenticationManagerResolver");
}
// -- support
@EnableWebSecurity
static class DefaultConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
.anyRequest().authenticated()
.and()
.oauth2ResourceServer()
.jwt();
// @formatter:on
}
}
@EnableWebSecurity
static class DefaultInLambdaConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests(authorizeRequests ->
authorizeRequests
.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
.anyRequest().authenticated()
)
.oauth2ResourceServer(oauth2ResourceServer ->
oauth2ResourceServer
.jwt(withDefaults())
);
// @formatter:on
}
}
@EnableWebSecurity
static class JwkSetUriConfig extends WebSecurityConfigurerAdapter {
@Value("${mockwebserver.url:https://example.org}")
String jwkSetUri;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
.anyRequest().authenticated()
.and()
.oauth2ResourceServer()
.jwt()
.jwkSetUri(this.jwkSetUri);
// @formatter:on
}
}
@EnableWebSecurity
static class JwkSetUriInLambdaConfig extends WebSecurityConfigurerAdapter {
@Value("${mockwebserver.url:https://example.org}")
String jwkSetUri;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests(authorizeRequests ->
authorizeRequests
.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
.anyRequest().authenticated()
)
.oauth2ResourceServer(oauth2ResourceServer ->
oauth2ResourceServer
.jwt(jwt ->
jwt
.jwkSetUri(this.jwkSetUri)
)
);
// @formatter:on
}
}
@EnableWebSecurity
static class CsrfDisabledConfig extends WebSecurityConfigurerAdapter {
@Value("${mockwebserver.url:https://example.org}")
String jwkSetUri;
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
.anyRequest().authenticated()
.and()
.csrf().disable()
.oauth2ResourceServer()
.jwt()
.jwkSetUri(this.jwkSetUri);
// @formatter:on
}
}
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true)
static class MethodSecurityConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().authenticated()
.and()
.oauth2ResourceServer()
.jwt();
// @formatter:on
}
}
@EnableWebSecurity
static class JwtlessConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().authenticated()
.and()
.oauth2ResourceServer();
// @formatter:on
}
}
@EnableWebSecurity
static class RealmNameConfiguredOnEntryPoint extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().authenticated()
.and()
.oauth2ResourceServer()
.authenticationEntryPoint(authenticationEntryPoint())
.jwt();
// @formatter:on
}
AuthenticationEntryPoint authenticationEntryPoint() {
BearerTokenAuthenticationEntryPoint entryPoint =
new BearerTokenAuthenticationEntryPoint();
entryPoint.setRealmName("myRealm");
return entryPoint;
}
}
@EnableWebSecurity
static class RealmNameConfiguredOnAccessDeniedHandler extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().denyAll()
.and()
.oauth2ResourceServer()
.accessDeniedHandler(accessDeniedHandler())
.jwt();
// @formatter:on
}
AccessDeniedHandler accessDeniedHandler() {
BearerTokenAccessDeniedHandler accessDeniedHandler =
new BearerTokenAccessDeniedHandler();
accessDeniedHandler.setRealmName("myRealm");
return accessDeniedHandler;
}
}
@EnableWebSecurity
static class ExceptionHandlingAndResourceServerWithAccessDeniedHandlerConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().denyAll()
.and()
.exceptionHandling()
.defaultAccessDeniedHandlerFor(new AccessDeniedHandlerImpl(), request -> false)
.and()
.httpBasic()
.and()
.oauth2ResourceServer()
.jwt();
// @formatter:on
}
@Bean
public UserDetailsService userDetailsService() {
return new InMemoryUserDetailsManager(
org.springframework.security.core.userdetails.User.withDefaultPasswordEncoder()
.username("basic-user")
.password("basic-password")
.roles("USER")
.build());
}
}
@EnableWebSecurity
static class JwtAuthenticationConverterConfiguredOnDsl extends WebSecurityConfigurerAdapter {
private final Converter<Jwt, JwtAuthenticationToken> jwtAuthenticationConverter = mock(Converter.class);
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.anyRequest().authenticated()
.and()
.oauth2ResourceServer()
.jwt()
.jwtAuthenticationConverter(getJwtAuthenticationConverter());
// @formatter:on
}
Converter<Jwt, JwtAuthenticationToken> getJwtAuthenticationConverter() {
return this.jwtAuthenticationConverter;
}
}
@EnableWebSecurity
static class CustomAuthorityMappingConfig extends WebSecurityConfigurerAdapter {
@Override
protected void configure(HttpSecurity http) throws Exception {
// @formatter:off
http
.authorizeRequests()
.antMatchers("/requires-read-scope").access("hasAuthority('message:read')")
.and()
.oauth2ResourceServer()
.jwt()
.jwtAuthenticationConverter(getJwtAuthenticationConverter());
// @formatter:on
}
Converter<Jwt, AbstractAuthenticationToken> getJwtAuthenticationConverter() {
JwtAuthenticationConverter converter = new JwtAuthenticationConverter();
converter.setJwtGrantedAuthoritiesConverter(jwt ->
Collections.singletonList(new SimpleGrantedAuthority("message:read"))
);
return converter;
}
}
// Fixture: HTTP Basic and JWT resource server enabled side by side.
@EnableWebSecurity
static class BasicAndResourceServerConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .httpBasic()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    // Credentials for the httpBasic() path.
    @Bean
    public UserDetailsService userDetailsService() {
        return new InMemoryUserDetailsManager(
                org.springframework.security.core.userdetails.User.withDefaultPasswordEncoder()
                        .username("basic-user")
                        .password("basic-password")
                        .roles("USER")
                        .build());
    }
}
// Fixture: form login and JWT resource server enabled side by side.
@EnableWebSecurity
static class FormAndResourceServerConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .formLogin()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }
}
// Fixture: deliberately incomplete JWT setup (no decoder/key material) so tests can
// assert the resulting wiring failure.
@EnableWebSecurity
static class JwtHalfConfiguredConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt(); // missing key configuration, e.g. jwkSetUri
        // @formatter:on
    }
}
// Fixture: forces SessionCreationPolicy.ALWAYS together with the resource server.
@EnableWebSecurity
static class AlwaysSessionCreationConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .sessionManagement()
                .sessionCreationPolicy(SessionCreationPolicy.ALWAYS)
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }
}
// Fixture: configures a BearerTokenResolver (via the DSL) that also accepts the token as a
// form-encoded body parameter.
@EnableWebSecurity
static class AllowBearerTokenInRequestBodyConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .bearerTokenResolver(allowRequestBody())
                .jwt();
        // @formatter:on
    }

    // Resolver that reads "access_token" from the request body in addition to the header.
    private BearerTokenResolver allowRequestBody() {
        DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
        resolver.setAllowFormEncodedBodyParameter(true);
        return resolver;
    }
}
// Fixture: publishes the BearerTokenResolver as a @Bean (not via the DSL) to show it is
// picked up from the application context.
@EnableWebSecurity
static class AllowBearerTokenAsQueryParameterConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    // Resolver that also reads "access_token" from the URI query string.
    @Bean
    BearerTokenResolver allowQueryParameter() {
        DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
        resolver.setAllowUriQueryParameter(true);
        return resolver;
    }
}
// Fixture: publishes TWO BearerTokenResolver beans; tests presumably use this to assert
// the ambiguous-bean failure mode (NOTE(review): confirm against the calling test).
@EnableWebSecurity
static class MultipleBearerTokenResolverBeansConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    @Bean
    BearerTokenResolver resolverOne() {
        DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
        resolver.setAllowUriQueryParameter(true);
        return resolver;
    }

    @Bean
    BearerTokenResolver resolverTwo() {
        DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
        resolver.setAllowFormEncodedBodyParameter(true);
        return resolver;
    }
}
// Fixture: supplies a mock JwtDecoder through the DSL's jwt().decoder(...) hook.
@EnableWebSecurity
static class CustomJwtDecoderOnDsl extends WebSecurityConfigurerAdapter {
    JwtDecoder decoder = mock(JwtDecoder.class);

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt()
                    .decoder(decoder());
        // @formatter:on
    }

    // Exposes the mock so tests can stub/verify it.
    JwtDecoder decoder() {
        return this.decoder;
    }
}
// Fixture: same as CustomJwtDecoderOnDsl but written with the lambda-style DSL.
@EnableWebSecurity
static class CustomJwtDecoderInLambdaOnDsl extends WebSecurityConfigurerAdapter {
    JwtDecoder decoder = mock(JwtDecoder.class);

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests(authorizeRequests ->
                authorizeRequests
                    .anyRequest().authenticated()
            )
            .oauth2ResourceServer(oauth2ResourceServer ->
                oauth2ResourceServer
                    .jwt(jwt ->
                        jwt
                            .decoder(decoder())
                    )
            );
        // @formatter:on
    }

    // Exposes the mock so tests can stub/verify it.
    JwtDecoder decoder() {
        return this.decoder;
    }
}
// Fixture: publishes the JwtDecoder as a @Bean instead of via the DSL.
@EnableWebSecurity
static class CustomJwtDecoderAsBean extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    @Bean
    public JwtDecoder decoder() {
        return mock(JwtDecoder.class);
    }
}
// Fixture: routes JWT authentication through a custom AuthenticationManager built from a
// mock AuthenticationProvider (method reference adapts provider to manager).
@EnableWebSecurity
static class JwtAuthenticationManagerConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt()
                    .authenticationManager(authenticationProvider()::authenticate);
        // @formatter:on
    }

    @Bean
    public AuthenticationProvider authenticationProvider() {
        return mock(AuthenticationProvider.class);
    }
}
// Fixture: installs a mock OAuth2TokenValidator on the autowired NimbusJwtDecoder so
// tests can verify the validator is consulted during decoding.
@EnableWebSecurity
static class CustomJwtValidatorConfig extends WebSecurityConfigurerAdapter {
    @Autowired
    NimbusJwtDecoder jwtDecoder;

    private final OAuth2TokenValidator<Jwt> jwtValidator = mock(OAuth2TokenValidator.class);

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // mutate the shared decoder before wiring the filter chain
        this.jwtDecoder.setJwtValidator(this.jwtValidator);
        // @formatter:off
        http
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    // Exposes the mock validator for stubbing/verification.
    public OAuth2TokenValidator<Jwt> getJwtValidator() {
        return this.jwtValidator;
    }
}
// Fixture: one-hour clock skew with a fixed clock set just UNDER an hour past the test
// token's expiry, so the token should still validate.
@EnableWebSecurity
static class UnexpiredJwtClockSkewConfig extends WebSecurityConfigurerAdapter {
    @Autowired
    NimbusJwtDecoder jwtDecoder;

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        Clock nearlyAnHourFromTokenExpiry =
                Clock.fixed(Instant.ofEpochMilli(4687181540000L), ZoneId.systemDefault());
        JwtTimestampValidator jwtValidator = new JwtTimestampValidator(Duration.ofHours(1));
        jwtValidator.setClock(nearlyAnHourFromTokenExpiry);
        this.jwtDecoder.setJwtValidator(jwtValidator);
        // @formatter:off
        http
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }
}
// Fixture: one-hour clock skew with a fixed clock set just OVER an hour past the test
// token's expiry, so validation should fail.
@EnableWebSecurity
static class ExpiredJwtClockSkewConfig extends WebSecurityConfigurerAdapter {
    @Autowired
    NimbusJwtDecoder jwtDecoder;

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        Clock justOverOneHourAfterExpiry =
                Clock.fixed(Instant.ofEpochMilli(4687181595000L), ZoneId.systemDefault());
        JwtTimestampValidator jwtValidator = new JwtTimestampValidator(Duration.ofHours(1));
        jwtValidator.setClock(justOverOneHourAfterExpiry);
        this.jwtDecoder.setJwtValidator(jwtValidator);
        // @formatter:off
        http
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }
}
// Fixture: JwtDecoder built from a single hard-coded RSA public key (X.509/SPKI,
// base64-encoded) rather than a JWK Set endpoint.
@EnableWebSecurity
static class SingleKeyConfig extends WebSecurityConfigurerAdapter {
    // DER-encoded SubjectPublicKeyInfo for the test RSA key.
    byte[] spec = Base64.getDecoder().decode(
            "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoXJ8OyOv/eRnce4akdan" +
            "R4KYRfnC2zLV4uYNQpcFn6oHL0dj7D6kxQmsXoYgJV8ZVDn71KGmuLvolxsDncc2" +
            "UrhyMBY6DVQVgMSVYaPCTgW76iYEKGgzTEw5IBRQL9w3SRJWd3VJTZZQjkXef48O" +
            "cz06PGF3lhbz4t5UEZtdF4rIe7u+977QwHuh7yRPBQ3sII+cVoOUMgaXB9SHcGF2" +
            "iZCtPzL/IffDUcfhLQteGebhW8A6eUHgpD5A1PQ+JCw/G7UOzZAjjDjtNM2eqm8j" +
            "+Ms/gqnm4MiCZ4E+9pDN77CAAPVN7kuX6ejs9KBXpk01z48i9fORYk9u7rAkh1Hu" +
            "QwIDAQAB");

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .jwt();
        // @formatter:on
    }

    // Builds the decoder from the key bytes above.
    @Bean
    JwtDecoder decoder() throws Exception {
        RSAPublicKey publicKey = (RSAPublicKey)
                KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(this.spec));
        return withPublicKey(publicKey).build();
    }
}
// Fixture: opaque-token (introspection) resource server, guarding /requires-read-scope
// with the SCOPE_message:read authority.
@EnableWebSecurity
static class OpaqueTokenConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .antMatchers("/requires-read-scope").hasAuthority("SCOPE_message:read")
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .opaqueToken();
        // @formatter:on
    }
}
// Fixture: same as OpaqueTokenConfig but written with the lambda-style DSL.
@EnableWebSecurity
static class OpaqueTokenInLambdaConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests(authorizeRequests ->
                authorizeRequests
                    .antMatchers("/requires-read-scope").hasAuthority("SCOPE_message:read")
                    .anyRequest().authenticated()
            )
            .oauth2ResourceServer(oauth2ResourceServer ->
                oauth2ResourceServer
                    .opaqueToken(withDefaults())
            );
        // @formatter:on
    }
}
// Fixture: opaque-token resource server with a custom AuthenticationManager adapted from
// a mock AuthenticationProvider.
@EnableWebSecurity
static class OpaqueTokenAuthenticationManagerConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .opaqueToken()
                    .authenticationManager(authenticationProvider()::authenticate);
        // @formatter:on
    }

    @Bean
    public AuthenticationProvider authenticationProvider() {
        return mock(AuthenticationProvider.class);
    }
}
// Fixture: same as OpaqueTokenAuthenticationManagerConfig, lambda-style DSL.
@EnableWebSecurity
static class OpaqueTokenAuthenticationManagerInLambdaConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests(authorizeRequests ->
                authorizeRequests
                    .anyRequest().authenticated()
            )
            .oauth2ResourceServer(oauth2ResourceServer ->
                oauth2ResourceServer
                    .opaqueToken(opaqueToken ->
                        opaqueToken
                            .authenticationManager(authenticationProvider()::authenticate)
                    )
            );
        // @formatter:on
    }

    @Bean
    public AuthenticationProvider authenticationProvider() {
        return mock(AuthenticationProvider.class);
    }
}
// Fixture: configures BOTH jwt() and opaqueToken() on the same resource server; tests
// presumably assert this combination is rejected (TODO confirm against the calling test).
@EnableWebSecurity
static class OpaqueAndJwtConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .oauth2ResourceServer()
                .jwt()
                    .and()
                .opaqueToken();
        // @formatter:on
    }
}
// Fixture: deliberately incomplete opaque-token setup (introspection URI but no client
// credentials) so tests can assert the resulting wiring failure.
@EnableWebSecurity
static class OpaqueTokenHalfConfiguredConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .opaqueToken()
                    .introspectionUri("https://idp.example.com"); // missing credentials
        // @formatter:on
    }
}
// Fixture: sets an AuthenticationManagerResolver AND opaqueToken() together; tests
// presumably assert this combination is rejected (TODO confirm against the calling test).
@EnableWebSecurity
static class AuthenticationManagerResolverPlusOtherConfig extends WebSecurityConfigurerAdapter {
    @Override
    protected void configure(HttpSecurity http) throws Exception {
        // @formatter:off
        http
            .authorizeRequests()
                .anyRequest().authenticated()
                .and()
            .oauth2ResourceServer()
                .authenticationManagerResolver(mock(AuthenticationManagerResolver.class))
                .opaqueToken();
        // @formatter:on
    }
}
// Supplies a mock JwtDecoder bean for configs that rely on context lookup.
@Configuration
static class JwtDecoderConfig {
    @Bean
    public JwtDecoder jwtDecoder() {
        return mock(JwtDecoder.class);
    }
}
// Minimal controller exercised by the MockMvc tests above.
@RestController
static class BasicController {
    @GetMapping("/")
    public String get() {
        return "ok";
    }

    @PostMapping("/post")
    public String post() {
        return "post";
    }

    // Echoes the authenticated principal's name for both GET and POST.
    @RequestMapping(value = "/authenticated", method = { GET, POST })
    public String authenticated(Authentication authentication) {
        return authentication.getName();
    }

    // Returns the token's granted authorities as a list string, e.g. "[SCOPE_message:read]".
    @GetMapping("/requires-read-scope")
    public String requiresReadScope(JwtAuthenticationToken token) {
        return token.getAuthorities().stream()
                .map(GrantedAuthority::getAuthority)
                .collect(Collectors.toList()).toString();
    }

    // Method-security variant of /requires-read-scope.
    @GetMapping("/ms-requires-read-scope")
    @PreAuthorize("hasAuthority('SCOPE_message:read')")
    public String msRequiresReadScope(JwtAuthenticationToken token) {
        return requiresReadScope(token);
    }

    // Always denied by method security; body should never be reached.
    @GetMapping("/ms-deny")
    @PreAuthorize("denyAll")
    public String deny() {
        return "hmm, that's odd";
    }
}
// Hosts a MockWebServer for JWK Set requests and exposes its URL through the
// "mockwebserver.url" property, registered first so it wins property resolution.
@Configuration
static class WebServerConfig implements BeanPostProcessor, EnvironmentAware {
    private final MockWebServer server = new MockWebServer();

    // Shut the embedded server down with the context.
    @PreDestroy
    public void shutdown() throws IOException {
        this.server.shutdown();
    }

    @Override
    public void setEnvironment(Environment environment) {
        if (environment instanceof ConfigurableEnvironment) {
            ((ConfigurableEnvironment) environment)
                    .getPropertySources().addFirst(new MockWebServerPropertySource());
        }
    }

    @Bean
    public MockWebServer web() {
        return this.server;
    }

    // Resolves "mockwebserver.url" to this server's JWK Set endpoint; null otherwise.
    private class MockWebServerPropertySource extends PropertySource {
        MockWebServerPropertySource() {
            super("mockwebserver");
        }

        @Override
        public Object getProperty(String name) {
            if ("mockwebserver.url".equals(name)) {
                return WebServerConfig.this.server.url("/.well-known/jwks.json").toString();
            } else {
                return null;
            }
        }
    }
}
// Supplies a shared mock RestOperations plus a JWT decoder and opaque-token introspector
// built on it, so tests can stub HTTP responses without a real server.
@Configuration
static class RestOperationsConfig {
    RestOperations rest = mock(RestOperations.class);

    @Bean
    RestOperations rest() {
        return this.rest;
    }

    @Bean
    NimbusJwtDecoder jwtDecoder() {
        return withJwkSetUri("https://example.org/.well-known/jwks.json")
                .restOperations(this.rest).build();
    }

    @Bean
    NimbusOpaqueTokenIntrospector tokenIntrospectionClient() {
        return new NimbusOpaqueTokenIntrospector("https://example.org/introspect", this.rest);
    }
}
// Registers a Mockito mock of {@code clazz} under {@code name} in the given context.
private static <T> void registerMockBean(GenericApplicationContext context, String name, Class<T> clazz) {
    context.registerBean(name, clazz, () -> mock(clazz));
}
// MockMvc post-processor that attaches a bearer token either as an Authorization header
// (default) or, after asParam(), as the "access_token" request parameter.
private static class BearerTokenRequestPostProcessor implements RequestPostProcessor {
    private boolean asRequestParameter;

    // The token is fixed at construction time; mark it final to make that explicit.
    private final String token;

    BearerTokenRequestPostProcessor(String token) {
        this.token = token;
    }

    // Switch delivery to the "access_token" request parameter; returns this for chaining.
    public BearerTokenRequestPostProcessor asParam() {
        this.asRequestParameter = true;
        return this;
    }

    @Override
    public MockHttpServletRequest postProcessRequest(MockHttpServletRequest request) {
        if (this.asRequestParameter) {
            request.setParameter("access_token", this.token);
        } else {
            request.addHeader("Authorization", "Bearer " + this.token);
        }
        return request;
    }
}
// Factory for the bearer-token request post-processor used throughout these tests.
private static BearerTokenRequestPostProcessor bearerToken(String token) {
    return new BearerTokenRequestPostProcessor(token);
}
// Matches a WWW-Authenticate header for an RFC 6750 "invalid_request" error whose
// error_description contains the given message.
private static ResultMatcher invalidRequestHeader(String message) {
    return header().string(HttpHeaders.WWW_AUTHENTICATE,
            AllOf.allOf(
                    new StringStartsWith("Bearer " +
                            "error=\"invalid_request\", " +
                            "error_description=\""),
                    new StringContains(message),
                    new StringEndsWith(", " +
                            "error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"")
            )
    );
}
// Matches a WWW-Authenticate header for an RFC 6750 "invalid_token" error whose
// error_description contains the given message.
private static ResultMatcher invalidTokenHeader(String message) {
    return header().string(HttpHeaders.WWW_AUTHENTICATE,
            AllOf.allOf(
                    new StringStartsWith("Bearer " +
                            "error=\"invalid_token\", " +
                            "error_description=\""),
                    new StringContains(message),
                    new StringEndsWith(", " +
                            "error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"")
            )
    );
}
// Matches the exact WWW-Authenticate header emitted for an "insufficient_scope" error.
private static ResultMatcher insufficientScopeHeader() {
    return header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer " +
            "error=\"insufficient_scope\"" +
            ", error_description=\"The request requires higher privileges than provided by the access token.\"" +
            ", error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"");
}
// Enqueues a 200 application/json response with the given body on the MockWebServer.
private void mockWebServer(String response) {
    this.web.enqueue(new MockResponse()
            .setResponseCode(200)
            .setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
            .setBody(response));
}
// Stubs the context's mock RestOperations so any exchange(..., String.class) call
// returns a 200 application/json response with the given body.
private void mockRestOperations(String response) {
    RestOperations rest = this.spring.getContext().getBean(RestOperations.class);
    HttpHeaders headers = new HttpHeaders();
    headers.setContentType(MediaType.APPLICATION_JSON);
    ResponseEntity<String> entity = new ResponseEntity<>(response, headers, HttpStatus.OK);
    when(rest.exchange(any(RequestEntity.class), eq(String.class)))
            .thenReturn(entity);
}
// Looks up a bean of the given type from the test application context.
private <T> T bean(Class<T> beanClass) {
    return this.spring.getContext().getBean(beanClass);
}
// Returns a Mockito verification handle for the context bean of the given type.
private <T> T verifyBean(Class<T> beanClass) {
    return verify(this.spring.getContext().getBean(beanClass));
}
// Loads the classpath fixture "<TestClass>-<name>.json" as a string.
private String json(String name) throws IOException {
    return resource(name + ".json");
}
// Loads the classpath fixture "<TestClass>-<name>.jwks" as a string.
private String jwks(String name) throws IOException {
    return resource(name + ".jwks");
}
// Loads the classpath fixture "<TestClass>-<name>.token" as a string.
private String token(String name) throws IOException {
    return resource(name + ".token");
}
// Reads the classpath fixture "<TestClass>-<suffix>" next to this class, joining its
// lines with no separator.
// NOTE(review): FileReader uses the platform-default charset; fixtures are presumably
// ASCII so this is benign — consider an explicit UTF-8 reader if that ever changes.
private String resource(String suffix) throws IOException {
    String name = this.getClass().getSimpleName() + "-" + suffix;
    ClassPathResource resource = new ClassPathResource(name, this.getClass());
    try ( BufferedReader reader = new BufferedReader(new FileReader(resource.getFile())) ) {
        return reader.lines().collect(Collectors.joining());
    }
}
}
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/resource/OAuth2ResourceServerConfigurerTests.java
|
/*
* Copyright 2002-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.annotation.web.configurers.oauth2.server.resource;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.security.KeyFactory;
import java.security.interfaces.RSAPublicKey;
import java.security.spec.X509EncodedKeySpec;
import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.util.Base64;
import java.util.Collections;
import java.util.Map;
import java.util.stream.Collectors;
import javax.annotation.PreDestroy;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.hamcrest.core.AllOf;
import org.hamcrest.core.StringContains;
import org.hamcrest.core.StringEndsWith;
import org.hamcrest.core.StringStartsWith;
import org.junit.Rule;
import org.junit.Test;
import org.springframework.beans.factory.BeanCreationException;
import org.springframework.beans.factory.NoUniqueBeanDefinitionException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.ApplicationContext;
import org.springframework.context.EnvironmentAware;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.support.GenericApplicationContext;
import org.springframework.core.convert.converter.Converter;
import org.springframework.core.env.ConfigurableEnvironment;
import org.springframework.core.env.Environment;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.ClassPathResource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.RequestEntity;
import org.springframework.http.ResponseEntity;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.authentication.AbstractAuthenticationToken;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.AuthenticationManagerResolver;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.config.annotation.method.configuration.EnableGlobalMethodSecurity;
import org.springframework.security.config.annotation.web.HttpSecurityBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.config.test.SpringTestRule;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.GrantedAuthority;
import org.springframework.security.core.annotation.AuthenticationPrincipal;
import org.springframework.security.core.authority.SimpleGrantedAuthority;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.oauth2.core.DefaultOAuth2AuthenticatedPrincipal;
import org.springframework.security.oauth2.core.OAuth2Error;
import org.springframework.security.oauth2.core.OAuth2TokenValidator;
import org.springframework.security.oauth2.core.OAuth2TokenValidatorResult;
import org.springframework.security.oauth2.jwt.Jwt;
import org.springframework.security.oauth2.jwt.JwtClaimNames;
import org.springframework.security.oauth2.jwt.JwtDecoder;
import org.springframework.security.oauth2.jwt.JwtException;
import org.springframework.security.oauth2.jwt.JwtTimestampValidator;
import org.springframework.security.oauth2.jwt.NimbusJwtDecoder;
import org.springframework.security.oauth2.server.resource.authentication.BearerTokenAuthentication;
import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationConverter;
import org.springframework.security.oauth2.server.resource.authentication.JwtAuthenticationToken;
import org.springframework.security.oauth2.server.resource.introspection.NimbusOpaqueTokenIntrospector;
import org.springframework.security.oauth2.server.resource.introspection.OpaqueTokenIntrospector;
import org.springframework.security.oauth2.server.resource.web.BearerTokenAuthenticationEntryPoint;
import org.springframework.security.oauth2.server.resource.web.BearerTokenResolver;
import org.springframework.security.oauth2.server.resource.web.DefaultBearerTokenResolver;
import org.springframework.security.oauth2.server.resource.web.access.BearerTokenAccessDeniedHandler;
import org.springframework.security.provisioning.InMemoryUserDetailsManager;
import org.springframework.security.web.AuthenticationEntryPoint;
import org.springframework.security.web.access.AccessDeniedHandler;
import org.springframework.security.web.access.AccessDeniedHandlerImpl;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.test.web.servlet.ResultMatcher;
import org.springframework.test.web.servlet.request.RequestPostProcessor;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestOperations;
import org.springframework.web.context.support.GenericWebApplicationContext;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.core.StringStartsWith.startsWith;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.springframework.security.config.Customizer.withDefaults;
import static org.springframework.security.oauth2.core.TestOAuth2AccessTokens.noScopes;
import static org.springframework.security.oauth2.jwt.NimbusJwtDecoder.withJwkSetUri;
import static org.springframework.security.oauth2.jwt.NimbusJwtDecoder.withPublicKey;
import static org.springframework.security.oauth2.jwt.TestJwts.jwt;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf;
import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.httpBasic;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.header;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.redirectedUrl;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;
/**
* Tests for {@link OAuth2ResourceServerConfigurer}
*
* @author Josh Cummings
*/
public class OAuth2ResourceServerConfigurerTests {
private static final String JWT_TOKEN = "token";
private static final String JWT_SUBJECT = "mock-test-subject";
private static final Map<String, Object> JWT_CLAIMS = Collections.singletonMap(JwtClaimNames.SUB, JWT_SUBJECT);
private static final Jwt JWT = jwt().build();
private static final String JWK_SET_URI = "https://mock.org";
private static final JwtAuthenticationToken JWT_AUTHENTICATION_TOKEN =
new JwtAuthenticationToken(JWT, Collections.emptyList());
private static final String INTROSPECTION_URI = "https://idp.example.com";
private static final String CLIENT_ID = "client-id";
private static final String CLIENT_SECRET = "client-secret";
private static final BearerTokenAuthentication INTROSPECTION_AUTHENTICATION_TOKEN =
new BearerTokenAuthentication(new DefaultOAuth2AuthenticatedPrincipal(JWT_CLAIMS, Collections.emptyList()),
noScopes(), Collections.emptyList());
@Autowired(required = false)
MockMvc mvc;
@Autowired(required = false)
MockWebServer web;
@Rule
public final SpringTestRule spring = new SpringTestRule();
// Valid signed token with no scopes against the default config -> 200 "ok".
@Test
public void getWhenUsingDefaultsWithValidBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
// Same as the default-config happy path, but through the lambda-DSL config.
@Test
public void getWhenUsingDefaultsInLambdaWithValidBearerTokenThenAcceptsRequest()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultInLambdaConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
// JWK Set served from the MockWebServer; valid token -> 200 "ok".
@Test
public void getWhenUsingJwkSetUriThenAcceptsRequest() throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriConfig.class, BasicController.class).autowire();
    mockWebServer(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
// Same JWK-Set-URI happy path through the lambda-DSL config.
@Test
public void getWhenUsingJwkSetUriInLambdaThenAcceptsRequest() throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriInLambdaConfig.class, BasicController.class).autowire();
    mockWebServer(jwks("Default"));
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isOk())
            .andExpect(content().string("ok"));
}
// Expired token -> 401 with an invalid_token challenge.
@Test
public void getWhenUsingDefaultsWithExpiredBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("Expired");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
// JWK endpoint returns garbage -> 401 with "Malformed Jwk set" in the challenge.
@Test
public void getWhenUsingDefaultsWithBadJwkEndpointThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations("malformed");
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt: Malformed Jwk set"));
}
// JWK server shut down before the request -> 401 invalid_token.
@Test
public void getWhenUsingDefaultsWithUnavailableJwkEndpointThenInvalidToken()
        throws Exception {
    this.spring.register(WebServerConfig.class, JwkSetUriConfig.class).autowire();
    this.web.shutdown();
    String token = this.token("ValidNoScopes");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
// Token containing illegal characters -> 401 "Bearer token is malformed".
@Test
public void getWhenUsingDefaultsWithMalformedBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/").with(bearerToken("an\"invalid\"token")))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("Bearer token is malformed"));
}
// Well-formed JWS with an unparseable payload -> 401 "Malformed payload".
@Test
public void getWhenUsingDefaultsWithMalformedPayloadThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    mockRestOperations(jwks("Default"));
    String token = this.token("MalformedPayload");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt: Malformed payload"));
}
// alg=none token -> 401 "Unsupported algorithm of none".
@Test
public void getWhenUsingDefaultsWithUnsignedBearerTokenThenInvalidToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    String token = this.token("Unsigned");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("Unsupported algorithm of none"));
}
// Token used before its nbf claim -> 401 invalid_token.
@Test
public void getWhenUsingDefaultsWithBearerTokenBeforeNotBeforeThenInvalidToken()
        throws Exception {
    this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
    this.mockRestOperations(jwks("Default"));
    String token = this.token("TooEarly");
    this.mvc.perform(get("/").with(bearerToken(token)))
            .andExpect(status().isUnauthorized())
            .andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
}
// Token in both header and parameter -> 400 "Found multiple bearer tokens".
@Test
public void getWhenUsingDefaultsWithBearerTokenInTwoPlacesThenInvalidRequest()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/")
            .with(bearerToken("token"))
            .with(bearerToken("token").asParam()))
            .andExpect(status().isBadRequest())
            .andExpect(invalidRequestHeader("Found multiple bearer tokens in the request"));
}
// Two access_token parameters -> 400 "Found multiple bearer tokens".
@Test
public void getWhenUsingDefaultsWithBearerTokenInTwoParametersThenInvalidRequest()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    MultiValueMap<String, String> params = new LinkedMultiValueMap<>();
    params.add("access_token", "token1");
    params.add("access_token", "token2");
    this.mvc.perform(get("/")
            .params(params))
            .andExpect(status().isBadRequest())
            .andExpect(invalidRequestHeader("Found multiple bearer tokens in the request"));
}
// Form-parameter token is ignored by default; POST without CSRF token -> 403,
// with no bearer challenge header.
@Test
public void postWhenUsingDefaultsWithBearerTokenAsFormParameterThenIgnoresToken()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(post("/") // engage csrf
            .with(bearerToken("token").asParam()))
            .andExpect(status().isForbidden())
            .andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
}
// With CSRF off, the ignored form-parameter token yields a plain 401 "Bearer" challenge.
@Test
public void postWhenCsrfDisabledWithBearerTokenAsFormParameterThenIgnoresToken()
        throws Exception {
    this.spring.register(CsrfDisabledConfig.class).autowire();
    this.mvc.perform(post("/")
            .with(bearerToken("token").asParam()))
            .andExpect(status().isUnauthorized())
            .andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer"));
}
// No credentials at all -> 401 with a bare "Bearer" challenge.
@Test
public void getWhenUsingDefaultsWithNoBearerTokenThenUnauthorized()
        throws Exception {
    this.spring.register(JwkSetUriConfig.class).autowire();
    this.mvc.perform(get("/"))
            .andExpect(status().isUnauthorized())
            .andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer"));
}
	// A token carrying the "message:read" scope may reach the scope-protected
	// endpoint; the controller echoes the granted authority.
	@Test
	public void getWhenUsingDefaultsWithSufficientlyScopedBearerTokenThenAcceptsRequest()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageReadScope");
		this.mvc.perform(get("/requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("SCOPE_message:read"));
	}

	// A valid token with no scopes is authenticated but forbidden (insufficient_scope).
	@Test
	public void getWhenUsingDefaultsWithInsufficientScopeThenInsufficientScopeError()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isForbidden())
				.andExpect(insufficientScopeHeader());
	}

	// Same outcome when scopes arrive via the alternative "scp" claim but don't
	// include the required one.
	@Test
	public void getWhenUsingDefaultsWithInsufficientScpThenInsufficientScopeError()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageWriteScp");
		this.mvc.perform(get("/requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isForbidden())
				.andExpect(insufficientScopeHeader());
	}
	// If the JWK Set contains no key that can verify the token, decoding fails
	// and the request is rejected as an invalid token.
	@Test
	public void getWhenUsingDefaultsAndAuthorizationServerHasNoMatchingKeyThenInvalidToken()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
		mockRestOperations(jwks("Empty"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isUnauthorized())
				.andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
	}

	// Multiple candidate keys in the JWK Set are tried; one matching key suffices.
	@Test
	public void getWhenUsingDefaultsAndAuthorizationServerHasMultipleMatchingKeysThenOk()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("TwoKeys"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
	}

	// A "kid" header in the JWT selects the correct key from a multi-key JWK Set.
	@Test
	public void getWhenUsingDefaultsAndKeyMatchesByKidThenOk()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("TwoKeys"));
		String token = this.token("Kid");
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
	}
	// -- Method Security

	// @PreAuthorize-guarded endpoint accepts a token with the required "scope" claim.
	@Test
	public void getWhenUsingMethodSecurityWithValidBearerTokenThenAcceptsRequest()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageReadScope");
		this.mvc.perform(get("/ms-requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("SCOPE_message:read"));
	}

	// The alternative "scp" claim is mapped to the same SCOPE_ authorities.
	@Test
	public void getWhenUsingMethodSecurityWithValidBearerTokenHavingScpAttributeThenAcceptsRequest()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageReadScp");
		this.mvc.perform(get("/ms-requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("SCOPE_message:read"));
	}

	// Method security denial is translated to 403 + insufficient_scope, same as URL security.
	@Test
	public void getWhenUsingMethodSecurityWithInsufficientScopeThenInsufficientScopeError()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/ms-requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isForbidden())
				.andExpect(insufficientScopeHeader());
	}

	// Wrong scope via the "scp" claim is also a 403 + insufficient_scope.
	@Test
	public void getWhenUsingMethodSecurityWithInsufficientScpThenInsufficientScopeError()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageWriteScp");
		this.mvc.perform(get("/ms-requires-read-scope")
				.with(bearerToken(token)))
				.andExpect(status().isForbidden())
				.andExpect(insufficientScopeHeader());
	}

	// A denyAll endpoint is forbidden even for an otherwise well-scoped token.
	@Test
	public void getWhenUsingMethodSecurityWithDenyAllThenInsufficientScopeError()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, MethodSecurityConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidMessageReadScope");
		this.mvc.perform(get("/ms-deny")
				.with(bearerToken(token)))
				.andExpect(status().isForbidden())
				.andExpect(insufficientScopeHeader());
	}
	// -- Resource Server should not engage csrf

	// Bearer-token requests bypass CSRF protection: a POST with a valid token and
	// no CSRF token succeeds.
	@Test
	public void postWhenUsingDefaultsWithValidBearerTokenAndNoCsrfTokenThenOk()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(post("/authenticated")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
	}

	// Without a bearer token, CSRF protection still applies to POSTs.
	@Test
	public void postWhenUsingDefaultsWithNoBearerTokenThenCsrfDenies()
			throws Exception {
		this.spring.register(JwkSetUriConfig.class).autowire();
		this.mvc.perform(post("/authenticated"))
				.andExpect(status().isForbidden())
				.andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
	}

	// An expired token on a CSRF-less POST still reaches the bearer-token path
	// and is reported as an invalid token rather than a CSRF failure.
	@Test
	public void postWhenUsingDefaultsWithExpiredBearerTokenAndNoCsrfThenInvalidToken()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("Expired");
		this.mvc.perform(post("/authenticated")
				.with(bearerToken(token)))
				.andExpect(status().isUnauthorized())
				.andExpect(invalidTokenHeader("An error occurred while attempting to decode the Jwt"));
	}
	// -- Resource Server should not create sessions

	// A successful JWT-authenticated request must not create an HTTP session.
	@Test
	public void requestWhenDefaultConfiguredThenSessionIsNotCreated()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, DefaultConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		MvcResult result = this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andReturn();
		assertThat(result.getRequest().getSession(false)).isNull();
	}

	// The same statelessness holds for opaque-token (introspection) authentication.
	@Test
	public void requestWhenIntrospectionConfiguredThenSessionIsNotCreated()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class, BasicController.class).autowire();
		mockRestOperations(json("Active"));
		MvcResult result = this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"))
				.andReturn();
		assertThat(result.getRequest().getSession(false)).isNull();
	}

	// NOTE(review): with no bearer token present, a session IS created — the
	// stateless policy only engages for bearer-token requests.
	@Test
	public void requestWhenUsingDefaultsAndNoBearerTokenThenSessionIsCreated()
			throws Exception {
		this.spring.register(JwkSetUriConfig.class, BasicController.class).autowire();
		MvcResult result = this.mvc.perform(get("/"))
				.andExpect(status().isUnauthorized())
				.andReturn();
		assertThat(result.getRequest().getSession(false)).isNotNull();
	}

	// User-specified session management (ALWAYS) overrides the resource server's
	// stateless default.
	@Test
	public void requestWhenSessionManagementConfiguredThenUserConfigurationOverrides()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, AlwaysSessionCreationConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		MvcResult result = this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andReturn();
		assertThat(result.getRequest().getSession(false)).isNotNull();
	}
	// -- custom bearer token resolver

	// A resolver configured to allow request-body tokens accepts either the
	// Authorization header or a form parameter.
	@Test
	public void requestWhenBearerTokenResolverAllowsRequestBodyThenEitherHeaderOrRequestBodyIsAccepted()
			throws Exception {
		this.spring.register(AllowBearerTokenInRequestBodyConfig.class, JwtDecoderConfig.class,
				BasicController.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
		this.mvc.perform(post("/authenticated")
				.param("access_token", JWT_TOKEN))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
	}

	// A resolver configured to allow query-parameter tokens accepts either the
	// Authorization header or an access_token query parameter.
	@Test
	public void requestWhenBearerTokenResolverAllowsQueryParameterThenEitherHeaderOrQueryParameterIsAccepted()
			throws Exception {
		this.spring.register(AllowBearerTokenAsQueryParameterConfig.class, JwtDecoderConfig.class,
				BasicController.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
		this.mvc.perform(get("/authenticated")
				.param("access_token", JWT_TOKEN))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
	}

	// Supplying the token through both supported channels at once is still
	// rejected as invalid_request.
	@Test
	public void requestWhenBearerTokenResolverAllowsRequestBodyAndRequestContainsTwoTokensThenInvalidRequest()
			throws Exception {
		this.spring.register(AllowBearerTokenInRequestBodyConfig.class, JwtDecoderConfig.class,
				BasicController.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(post("/authenticated")
				.param("access_token", JWT_TOKEN)
				.with(bearerToken(JWT_TOKEN))
				.with(csrf()))
				.andExpect(status().isBadRequest())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("invalid_request")));
	}

	// Same duplicate-token rejection for the query-parameter resolver.
	@Test
	public void requestWhenBearerTokenResolverAllowsQueryParameterAndRequestContainsTwoTokensThenInvalidRequest()
			throws Exception {
		this.spring.register(AllowBearerTokenAsQueryParameterConfig.class, JwtDecoderConfig.class,
				BasicController.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN))
				.param("access_token", JWT_TOKEN))
				.andExpect(status().isBadRequest())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("invalid_request")));
	}
	// A resolver set on the DSL beats multiple ambiguous BearerTokenResolver beans.
	@Test
	public void getBearerTokenResolverWhenDuplicateResolverBeansAndAnotherOnTheDslThenTheDslOneIsUsed() {
		BearerTokenResolver resolverBean = mock(BearerTokenResolver.class);
		BearerTokenResolver resolver = mock(BearerTokenResolver.class);
		GenericWebApplicationContext context = new GenericWebApplicationContext();
		context.registerBean("resolverOne", BearerTokenResolver.class, () -> resolverBean);
		context.registerBean("resolverTwo", BearerTokenResolver.class, () -> resolverBean);
		this.spring.context(context).autowire();
		OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
		oauth2.bearerTokenResolver(resolver);
		assertThat(oauth2.getBearerTokenResolver()).isEqualTo(resolver);
	}

	// With no DSL override, two resolver beans are ambiguous and fail wiring.
	@Test
	public void getBearerTokenResolverWhenDuplicateResolverBeansThenWiringException() {
		assertThatCode(() -> this.spring.register(MultipleBearerTokenResolverBeansConfig.class).autowire())
				.isInstanceOf(BeanCreationException.class)
				.hasRootCauseInstanceOf(NoUniqueBeanDefinitionException.class);
	}

	// A DSL-provided resolver also takes precedence over a single resolver bean.
	@Test
	public void getBearerTokenResolverWhenResolverBeanAndAnotherOnTheDslThenTheDslOneIsUsed() {
		BearerTokenResolver resolver = mock(BearerTokenResolver.class);
		BearerTokenResolver resolverBean = mock(BearerTokenResolver.class);
		GenericWebApplicationContext context = new GenericWebApplicationContext();
		context.registerBean(BearerTokenResolver.class, () -> resolverBean);
		this.spring.context(context).autowire();
		OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
		oauth2.bearerTokenResolver(resolver);
		assertThat(oauth2.getBearerTokenResolver()).isEqualTo(resolver);
	}

	// With neither a DSL value nor a bean, the default resolver is used.
	@Test
	public void getBearerTokenResolverWhenNoResolverSpecifiedThenTheDefaultIsUsed() {
		ApplicationContext context =
				this.spring.context(new GenericWebApplicationContext()).getContext();
		OAuth2ResourceServerConfigurer oauth2 = new OAuth2ResourceServerConfigurer(context);
		assertThat(oauth2.getBearerTokenResolver()).isInstanceOf(DefaultBearerTokenResolver.class);
	}
	// -- custom jwt decoder

	// A JwtDecoder wired directly on the DSL is the one used at request time.
	@Test
	public void requestWhenCustomJwtDecoderWiredOnDslThenUsed()
			throws Exception {
		this.spring.register(CustomJwtDecoderOnDsl.class, BasicController.class).autowire();
		CustomJwtDecoderOnDsl config = this.spring.getContext().getBean(CustomJwtDecoderOnDsl.class);
		JwtDecoder decoder = config.decoder();
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
	}

	// Same behavior when the decoder is wired via the lambda-style DSL.
	@Test
	public void requestWhenCustomJwtDecoderInLambdaOnDslThenUsed()
			throws Exception {
		this.spring.register(CustomJwtDecoderInLambdaOnDsl.class, BasicController.class).autowire();
		CustomJwtDecoderInLambdaOnDsl config = this.spring.getContext().getBean(CustomJwtDecoderInLambdaOnDsl.class);
		JwtDecoder decoder = config.decoder();
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
	}

	// A JwtDecoder exposed only as a bean is picked up automatically.
	@Test
	public void requestWhenCustomJwtDecoderExposedAsBeanThenUsed()
			throws Exception {
		this.spring.register(CustomJwtDecoderAsBean.class, BasicController.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk())
				.andExpect(content().string(JWT_SUBJECT));
	}
	// When both decoder(...) and jwkSetUri(...) are configured, whichever was
	// set last wins.
	@Test
	public void getJwtDecoderWhenConfiguredWithDecoderAndJwkSetUriThenLastOneWins() {
		ApplicationContext context = mock(ApplicationContext.class);
		OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		JwtDecoder decoder = mock(JwtDecoder.class);
		jwtConfigurer.jwkSetUri(JWK_SET_URI);
		jwtConfigurer.decoder(decoder);
		assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
		jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		jwtConfigurer.decoder(decoder);
		jwtConfigurer.jwkSetUri(JWK_SET_URI);
		assertThat(jwtConfigurer.getJwtDecoder()).isInstanceOf(NimbusJwtDecoder.class);
	}

	// A DSL-wired decoder beats a JwtDecoder bean from the context.
	@Test
	public void getJwtDecoderWhenConflictingJwtDecodersThenTheDslWiredOneTakesPrecedence() {
		JwtDecoder decoderBean = mock(JwtDecoder.class);
		JwtDecoder decoder = mock(JwtDecoder.class);
		ApplicationContext context = mock(ApplicationContext.class);
		when(context.getBean(JwtDecoder.class)).thenReturn(decoderBean);
		OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		jwtConfigurer.decoder(decoder);
		assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
	}

	// An explicit jwkSetUri(...) beats a JwtDecoder bean: a NimbusJwtDecoder is built.
	@Test
	public void getJwtDecoderWhenContextHasBeanAndUserConfiguresJwkSetUriThenJwkSetUriTakesPrecedence() {
		JwtDecoder decoder = mock(JwtDecoder.class);
		ApplicationContext context = mock(ApplicationContext.class);
		when(context.getBean(JwtDecoder.class)).thenReturn(decoder);
		OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		jwtConfigurer.jwkSetUri(JWK_SET_URI);
		assertThat(jwtConfigurer.getJwtDecoder()).isNotEqualTo(decoder);
		assertThat(jwtConfigurer.getJwtDecoder()).isInstanceOf(NimbusJwtDecoder.class);
	}

	// A DSL-wired decoder resolves ambiguity between two JwtDecoder beans.
	@Test
	public void getJwtDecoderWhenTwoJwtDecoderBeansAndAnotherWiredOnDslThenDslWiredOneTakesPrecedence() {
		JwtDecoder decoderBean = mock(JwtDecoder.class);
		JwtDecoder decoder = mock(JwtDecoder.class);
		GenericWebApplicationContext context = new GenericWebApplicationContext();
		context.registerBean("decoderOne", JwtDecoder.class, () -> decoderBean);
		context.registerBean("decoderTwo", JwtDecoder.class, () -> decoderBean);
		this.spring.context(context).autowire();
		OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		jwtConfigurer.decoder(decoder);
		assertThat(jwtConfigurer.getJwtDecoder()).isEqualTo(decoder);
	}

	// Without a DSL override, two JwtDecoder beans are ambiguous and throw.
	@Test
	public void getJwtDecoderWhenTwoJwtDecoderBeansThenThrowsException() {
		JwtDecoder decoder = mock(JwtDecoder.class);
		GenericWebApplicationContext context = new GenericWebApplicationContext();
		context.registerBean("decoderOne", JwtDecoder.class, () -> decoder);
		context.registerBean("decoderTwo", JwtDecoder.class, () -> decoder);
		this.spring.context(context).autowire();
		OAuth2ResourceServerConfigurer.JwtConfigurer jwtConfigurer =
				new OAuth2ResourceServerConfigurer(context).jwt();
		assertThatCode(() -> jwtConfigurer.getJwtDecoder())
				.isInstanceOf(NoUniqueBeanDefinitionException.class);
	}
	// -- exception handling

	// A realm configured on the entry point appears in the 401 challenge.
	@Test
	public void requestWhenRealmNameConfiguredThenUsesOnUnauthenticated()
			throws Exception {
		this.spring.register(RealmNameConfiguredOnEntryPoint.class, JwtDecoderConfig.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenThrow(JwtException.class);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("invalid_token")))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer realm=\"myRealm\"")));
	}

	// A realm configured on the access-denied handler appears in the 403 challenge.
	@Test
	public void requestWhenRealmNameConfiguredThenUsesOnAccessDenied()
			throws Exception {
		this.spring.register(RealmNameConfiguredOnAccessDeniedHandler.class, JwtDecoderConfig.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("insufficiently_scoped")))
				.andExpect(status().isForbidden())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer realm=\"myRealm\"")));
	}

	// Null entry points are rejected eagerly with IllegalArgumentException.
	@Test
	public void authenticationEntryPointWhenGivenNullThenThrowsException() {
		ApplicationContext context = mock(ApplicationContext.class);
		OAuth2ResourceServerConfigurer configurer = new OAuth2ResourceServerConfigurer(context);
		assertThatCode(() -> configurer.authenticationEntryPoint(null))
				.isInstanceOf(IllegalArgumentException.class);
	}

	// Null access-denied handlers are rejected eagerly as well.
	@Test
	public void accessDeniedHandlerWhenGivenNullThenThrowsException() {
		ApplicationContext context = mock(ApplicationContext.class);
		OAuth2ResourceServerConfigurer configurer = new OAuth2ResourceServerConfigurer(context);
		assertThatCode(() -> configurer.accessDeniedHandler(null))
				.isInstanceOf(IllegalArgumentException.class);
	}
	// -- token validator

	// A custom OAuth2TokenValidator failure surfaces its error description in the
	// WWW-Authenticate header.
	@Test
	public void requestWhenCustomJwtValidatorFailsThenCorrespondingErrorMessage()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, CustomJwtValidatorConfig.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		OAuth2TokenValidator<Jwt> jwtValidator =
				this.spring.getContext().getBean(CustomJwtValidatorConfig.class)
						.getJwtValidator();
		OAuth2Error error = new OAuth2Error("custom-error", "custom-description", "custom-uri");
		when(jwtValidator.validate(any(Jwt.class))).thenReturn(OAuth2TokenValidatorResult.failure(error));
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("custom-description")));
	}

	// A configured clock skew relaxes expiry checks enough for this token to pass.
	// The "ExpiresAt4687177990" fixture encodes an exp close to the configured window.
	@Test
	public void requestWhenClockSkewSetThenTimestampWindowRelaxedAccordingly()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, UnexpiredJwtClockSkewConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ExpiresAt4687177990");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isOk());
	}

	// The same token under a tighter skew configuration is reported as expired.
	@Test
	public void requestWhenClockSkewSetButJwtStillTooLateThenReportsExpired()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, ExpiredJwtClockSkewConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ExpiresAt4687177990");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isUnauthorized())
				.andExpect(invalidTokenHeader("Jwt expired at"));
	}
	// -- converter

	// A JwtAuthenticationConverter set on the DSL is invoked for each decoded JWT.
	@Test
	public void requestWhenJwtAuthenticationConverterConfiguredOnDslThenIsUsed()
			throws Exception {
		this.spring.register(JwtDecoderConfig.class, JwtAuthenticationConverterConfiguredOnDsl.class,
				BasicController.class).autowire();
		Converter<Jwt, JwtAuthenticationToken> jwtAuthenticationConverter =
				this.spring.getContext().getBean(JwtAuthenticationConverterConfiguredOnDsl.class)
						.getJwtAuthenticationConverter();
		when(jwtAuthenticationConverter.convert(JWT)).thenReturn(JWT_AUTHENTICATION_TOKEN);
		JwtDecoder jwtDecoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(jwtDecoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk());
		verify(jwtAuthenticationConverter).convert(JWT);
	}

	// A converter that customizes authority mapping lets the token satisfy the
	// scope-protected endpoint.
	@Test
	public void requestWhenJwtAuthenticationConverterCustomizedAuthoritiesThenThoseAuthoritiesArePropagated()
			throws Exception {
		this.spring.register(JwtDecoderConfig.class, CustomAuthorityMappingConfig.class, BasicController.class)
				.autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(JWT_TOKEN)).thenReturn(JWT);
		this.mvc.perform(get("/requires-read-scope")
				.with(bearerToken(JWT_TOKEN)))
				.andExpect(status().isOk());
	}
	// -- single key

	// Verification against a single configured public key succeeds for a valid token.
	@Test
	public void requestWhenUsingPublicKeyAndValidTokenThenAuthenticates()
			throws Exception {
		this.spring.register(SingleKeyConfig.class, BasicController.class).autowire();
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(status().isOk());
	}

	// A bad signature against the single key is reported as an invalid token.
	@Test
	public void requestWhenUsingPublicKeyAndSignatureFailsThenReturnsInvalidToken()
			throws Exception {
		this.spring.register(SingleKeyConfig.class).autowire();
		String token = this.token("WrongSignature");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(invalidTokenHeader("signature"));
	}

	// A token signed with a different algorithm than the key expects is rejected.
	@Test
	public void requestWhenUsingPublicKeyAlgorithmDoesNotMatchThenReturnsInvalidToken()
			throws Exception {
		this.spring.register(SingleKeyConfig.class).autowire();
		String token = this.token("WrongAlgorithm");
		this.mvc.perform(get("/")
				.with(bearerToken(token)))
				.andExpect(invalidTokenHeader("algorithm"));
	}

	// A custom AuthenticationManager wired for JWT authentication replaces the
	// built-in provider entirely.
	@Test
	public void getWhenCustomJwtAuthenticationManagerThenUsed() throws Exception {
		this.spring.register(JwtAuthenticationManagerConfig.class, BasicController.class).autowire();
		when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
				.thenReturn(JWT_AUTHENTICATION_TOKEN);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("mock-test-subject"));
		verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
	}
	// -- opaque

	// An active introspection response authenticates the request.
	@Test
	public void getWhenIntrospectingThenOk() throws Exception {
		this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class, BasicController.class).autowire();
		mockRestOperations(json("Active"));
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
	}

	// Same outcome when opaque-token support is configured via the lambda DSL.
	@Test
	public void getWhenOpaqueTokenInLambdaAndIntrospectingThenOk() throws Exception {
		this.spring.register(RestOperationsConfig.class, OpaqueTokenInLambdaConfig.class, BasicController.class).autowire();
		mockRestOperations(json("Active"));
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
	}

	// An inactive introspection response yields a 401 naming the rejected token.
	@Test
	public void getWhenIntrospectionFailsThenUnauthorized() throws Exception {
		this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class).autowire();
		mockRestOperations(json("Inactive"));
		this.mvc.perform(get("/")
				.with(bearerToken("token")))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE,
						containsString("Provided token [token] isn't active")));
	}

	// An active token without the required scope is forbidden (insufficient_scope).
	@Test
	public void getWhenIntrospectionLacksScopeThenForbidden() throws Exception {
		this.spring.register(RestOperationsConfig.class, OpaqueTokenConfig.class).autowire();
		mockRestOperations(json("ActiveNoScopes"));
		this.mvc.perform(get("/requires-read-scope")
				.with(bearerToken("token")))
				.andExpect(status().isForbidden())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, containsString("scope")));
	}

	// A custom AuthenticationManager replaces the built-in introspection provider.
	@Test
	public void getWhenCustomIntrospectionAuthenticationManagerThenUsed() throws Exception {
		this.spring.register(OpaqueTokenAuthenticationManagerConfig.class, BasicController.class).autowire();
		when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
				.thenReturn(INTROSPECTION_AUTHENTICATION_TOKEN);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("mock-test-subject"));
		verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
	}

	// Same custom-manager behavior via the lambda DSL.
	@Test
	public void getWhenCustomIntrospectionAuthenticationManagerInLambdaThenUsed() throws Exception {
		this.spring.register(OpaqueTokenAuthenticationManagerInLambdaConfig.class, BasicController.class).autowire();
		when(bean(AuthenticationProvider.class).authenticate(any(Authentication.class)))
				.thenReturn(INTROSPECTION_AUTHENTICATION_TOKEN);
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("token")))
				.andExpect(status().isOk())
				.andExpect(content().string("mock-test-subject"));
		verifyBean(AuthenticationProvider.class).authenticate(any(Authentication.class));
	}

	// Configuring only the introspection URI (no client credentials) fails wiring.
	@Test
	public void configureWhenOnlyIntrospectionUrlThenException() {
		assertThatCode(() -> this.spring.register(OpaqueTokenHalfConfiguredConfig.class).autowire())
				.isInstanceOf(BeanCreationException.class);
	}

	// Between introspector(...) and introspectionUri(...), last setting wins.
	@Test
	public void getIntrospectionClientWhenConfiguredWithClientAndIntrospectionUriThenLastOneWins() {
		ApplicationContext context = mock(ApplicationContext.class);
		OAuth2ResourceServerConfigurer.OpaqueTokenConfigurer opaqueTokenConfigurer =
				new OAuth2ResourceServerConfigurer(context).opaqueToken();
		OpaqueTokenIntrospector client = mock(OpaqueTokenIntrospector.class);
		opaqueTokenConfigurer.introspectionUri(INTROSPECTION_URI);
		opaqueTokenConfigurer.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
		opaqueTokenConfigurer.introspector(client);
		assertThat(opaqueTokenConfigurer.getIntrospector()).isEqualTo(client);
		opaqueTokenConfigurer =
				new OAuth2ResourceServerConfigurer(context).opaqueToken();
		opaqueTokenConfigurer.introspector(client);
		opaqueTokenConfigurer.introspectionUri(INTROSPECTION_URI);
		opaqueTokenConfigurer.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
		assertThat(opaqueTokenConfigurer.getIntrospector())
				.isInstanceOf(NimbusOpaqueTokenIntrospector.class);
	}

	// DSL settings resolve even when the context has ambiguous introspector beans.
	@Test
	public void getIntrospectionClientWhenDslAndBeanWiredThenDslTakesPrecedence() {
		GenericApplicationContext context = new GenericApplicationContext();
		registerMockBean(context, "introspectionClientOne", OpaqueTokenIntrospector.class);
		registerMockBean(context, "introspectionClientTwo", OpaqueTokenIntrospector.class);
		OAuth2ResourceServerConfigurer.OpaqueTokenConfigurer opaqueToken =
				new OAuth2ResourceServerConfigurer(context).opaqueToken();
		opaqueToken.introspectionUri(INTROSPECTION_URI);
		opaqueToken.introspectionClientCredentials(CLIENT_ID, CLIENT_SECRET);
		assertThat(opaqueToken.getIntrospector()).isNotNull();
	}
	// -- In combination with other authentication providers

	// With both HTTP Basic and resource server configured, the failure challenge
	// matches the credential type the request presented (Basic vs Bearer).
	@Test
	public void requestWhenBasicAndResourceServerEntryPointsThenMatchedByRequest()
			throws Exception {
		this.spring.register(BasicAndResourceServerConfig.class, JwtDecoderConfig.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenThrow(JwtException.class);
		this.mvc.perform(get("/authenticated")
				.with(httpBasic("some", "user")))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Basic")));
		this.mvc.perform(get("/authenticated"))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Basic")));
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("invalid_token")))
				.andExpect(status().isUnauthorized())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer")));
	}

	// With form login configured alongside, a browser-style request gets the
	// login redirect (and a session), while a bearer request stays stateless.
	@Test
	public void requestWhenFormLoginAndResourceServerEntryPointsThenSessionCreatedByRequest()
			throws Exception {
		this.spring.register(FormAndResourceServerConfig.class, JwtDecoderConfig.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenThrow(JwtException.class);
		MvcResult result =
				this.mvc.perform(get("/authenticated"))
						.andExpect(status().isFound())
						.andExpect(redirectedUrl("http://localhost/login"))
						.andReturn();
		assertThat(result.getRequest().getSession(false)).isNotNull();
		result =
				this.mvc.perform(get("/authenticated")
						.with(bearerToken("token")))
						.andExpect(status().isUnauthorized())
						.andReturn();
		assertThat(result.getRequest().getSession(false)).isNull();
	}

	// Access-denied handling is likewise matched to the credential type: Basic
	// requests get no challenge; bearer requests get a Bearer challenge.
	@Test
	public void requestWhenDefaultAndResourceServerAccessDeniedHandlersThenMatchedByRequest()
			throws Exception {
		this.spring.register(ExceptionHandlingAndResourceServerWithAccessDeniedHandlerConfig.class,
				JwtDecoderConfig.class).autowire();
		JwtDecoder decoder = this.spring.getContext().getBean(JwtDecoder.class);
		when(decoder.decode(anyString())).thenReturn(JWT);
		this.mvc.perform(get("/authenticated")
				.with(httpBasic("basic-user", "basic-password")))
				.andExpect(status().isForbidden())
				.andExpect(header().doesNotExist(HttpHeaders.WWW_AUTHENTICATE));
		this.mvc.perform(get("/authenticated")
				.with(bearerToken("insufficiently_scoped")))
				.andExpect(status().isForbidden())
				.andExpect(header().string(HttpHeaders.WWW_AUTHENTICATE, startsWith("Bearer")));
	}

	// The correct AuthenticationProvider engages per request: JWT for bearer
	// tokens, DaoAuthenticationProvider for HTTP Basic.
	@Test
	public void getWhenAlsoUsingHttpBasicThenCorrectProviderEngages()
			throws Exception {
		this.spring.register(RestOperationsConfig.class, BasicAndResourceServerConfig.class, BasicController.class).autowire();
		mockRestOperations(jwks("Default"));
		String token = this.token("ValidNoScopes");
		this.mvc.perform(get("/authenticated")
				.with(bearerToken(token)))
				.andExpect(status().isOk())
				.andExpect(content().string("test-subject"));
		this.mvc.perform(get("/authenticated")
				.with(httpBasic("basic-user", "basic-password")))
				.andExpect(status().isOk())
				.andExpect(content().string("basic-user"));
	}
	// -- authentication manager

	// An AuthenticationManager set on either the jwt() or opaqueToken() DSL is
	// returned as-is, and no extra AuthenticationProvider is registered on http.
	@Test
	public void getAuthenticationManagerWhenConfiguredAuthenticationManagerThenTakesPrecedence() {
		ApplicationContext context = mock(ApplicationContext.class);
		HttpSecurityBuilder http = mock(HttpSecurityBuilder.class);
		OAuth2ResourceServerConfigurer oauth2ResourceServer = new OAuth2ResourceServerConfigurer(context);
		AuthenticationManager authenticationManager = mock(AuthenticationManager.class);
		oauth2ResourceServer
				.jwt()
				.authenticationManager(authenticationManager)
				.decoder(mock(JwtDecoder.class));
		assertThat(oauth2ResourceServer.getAuthenticationManager(http)).isSameAs(authenticationManager);
		oauth2ResourceServer = new OAuth2ResourceServerConfigurer(context);
		oauth2ResourceServer
				.opaqueToken()
				.authenticationManager(authenticationManager)
				.introspector(mock(OpaqueTokenIntrospector.class));
		assertThat(oauth2ResourceServer.getAuthenticationManager(http)).isSameAs(authenticationManager);
		verify(http, never()).authenticationProvider(any(AuthenticationProvider.class));
	}
// -- Incorrect Configuration
@Test
public void configuredWhenMissingJwtAuthenticationProviderThenWiringException() {
assertThatCode(() -> this.spring.register(JwtlessConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("neither was found");
}
@Test
public void configureWhenMissingJwkSetUriThenWiringException() {
assertThatCode(() -> this.spring.register(JwtHalfConfiguredConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("No qualifying bean of type");
}
@Test
public void configureWhenUsingBothJwtAndOpaqueThenWiringException() {
assertThatCode(() -> this.spring.register(OpaqueAndJwtConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("Spring Security only supports JWTs or Opaque Tokens");
}
@Test
public void configureWhenUsingBothAuthenticationManagerResolverAndOpaqueThenWiringException() {
assertThatCode(() -> this.spring.register(AuthenticationManagerResolverPlusOtherConfig.class).autowire())
.isInstanceOf(BeanCreationException.class)
.hasMessageContaining("authenticationManagerResolver");
}
// -- support
// Baseline resource-server configuration using the classic (non-lambda) DSL.
@EnableWebSecurity
static class DefaultConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Same configuration as DefaultConfig, expressed with the lambda DSL.
@EnableWebSecurity
static class DefaultInLambdaConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests(authorizeRequests ->
				authorizeRequests
					.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
					.anyRequest().authenticated()
			)
			.oauth2ResourceServer(oauth2ResourceServer ->
				oauth2ResourceServer
					.jwt(withDefaults())
			);
		// @formatter:on
	}
}
// Resource server whose JWK Set URI points at the MockWebServer (see
// MockWebServerPropertySource), falling back to a placeholder URL.
@EnableWebSecurity
static class JwkSetUriConfig extends WebSecurityConfigurerAdapter {
	@Value("${mockwebserver.url:https://example.org}")
	String jwkSetUri;
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt()
					.jwkSetUri(this.jwkSetUri);
		// @formatter:on
	}
}
// Lambda-DSL variant of JwkSetUriConfig.
@EnableWebSecurity
static class JwkSetUriInLambdaConfig extends WebSecurityConfigurerAdapter {
	@Value("${mockwebserver.url:https://example.org}")
	String jwkSetUri;
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests(authorizeRequests ->
				authorizeRequests
					.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
					.anyRequest().authenticated()
			)
			.oauth2ResourceServer(oauth2ResourceServer ->
				oauth2ResourceServer
					.jwt(jwt ->
						jwt
							.jwkSetUri(this.jwkSetUri)
					)
			);
		// @formatter:on
	}
}
// Like JwkSetUriConfig but with CSRF protection disabled, so POSTs without a
// CSRF token can exercise the bearer-token path.
@EnableWebSecurity
static class CsrfDisabledConfig extends WebSecurityConfigurerAdapter {
	@Value("${mockwebserver.url:https://example.org}")
	String jwkSetUri;
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.antMatchers("/requires-read-scope").access("hasAuthority('SCOPE_message:read')")
				.anyRequest().authenticated()
				.and()
			.csrf().disable()
			.oauth2ResourceServer()
				.jwt()
					.jwkSetUri(this.jwkSetUri);
		// @formatter:on
	}
}
// Enables @PreAuthorize so method-security endpoints in BasicController apply.
@EnableWebSecurity
@EnableGlobalMethodSecurity(prePostEnabled = true)
static class MethodSecurityConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Intentionally broken: oauth2ResourceServer() without jwt() or opaqueToken();
// used to assert the "neither was found" wiring error.
@EnableWebSecurity
static class JwtlessConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer();
		// @formatter:on
	}
}
// Customizes the 401 entry point with a realm name.
@EnableWebSecurity
static class RealmNameConfiguredOnEntryPoint extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.authenticationEntryPoint(authenticationEntryPoint())
				.jwt();
		// @formatter:on
	}
	AuthenticationEntryPoint authenticationEntryPoint() {
		BearerTokenAuthenticationEntryPoint entryPoint =
				new BearerTokenAuthenticationEntryPoint();
		entryPoint.setRealmName("myRealm");
		return entryPoint;
	}
}
// Customizes the 403 access-denied handler with a realm name; denyAll forces
// every authenticated request down the access-denied path.
@EnableWebSecurity
static class RealmNameConfiguredOnAccessDeniedHandler extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().denyAll()
				.and()
			.oauth2ResourceServer()
				.accessDeniedHandler(accessDeniedHandler())
				.jwt();
		// @formatter:on
	}
	AccessDeniedHandler accessDeniedHandler() {
		BearerTokenAccessDeniedHandler accessDeniedHandler =
				new BearerTokenAccessDeniedHandler();
		accessDeniedHandler.setRealmName("myRealm");
		return accessDeniedHandler;
	}
}
// Registers a default access-denied handler whose matcher never fires, so the
// resource server's own handler should still win for bearer-token requests.
@EnableWebSecurity
static class ExceptionHandlingAndResourceServerWithAccessDeniedHandlerConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().denyAll()
				.and()
			.exceptionHandling()
				.defaultAccessDeniedHandlerFor(new AccessDeniedHandlerImpl(), request -> false)
				.and()
			.httpBasic()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	public UserDetailsService userDetailsService() {
		return new InMemoryUserDetailsManager(
				org.springframework.security.core.userdetails.User.withDefaultPasswordEncoder()
						.username("basic-user")
						.password("basic-password")
						.roles("USER")
						.build());
	}
}
// Supplies a mock Jwt->Authentication converter via the DSL so tests can
// verify the configurer actually invokes it.
@EnableWebSecurity
static class JwtAuthenticationConverterConfiguredOnDsl extends WebSecurityConfigurerAdapter {
	private final Converter<Jwt, JwtAuthenticationToken> jwtAuthenticationConverter = mock(Converter.class);
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt()
					.jwtAuthenticationConverter(getJwtAuthenticationConverter());
		// @formatter:on
	}
	Converter<Jwt, JwtAuthenticationToken> getJwtAuthenticationConverter() {
		return this.jwtAuthenticationConverter;
	}
}
// Maps every JWT to a fixed "message:read" authority (no SCOPE_ prefix),
// demonstrating custom authority mapping.
@EnableWebSecurity
static class CustomAuthorityMappingConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.antMatchers("/requires-read-scope").access("hasAuthority('message:read')")
				.and()
			.oauth2ResourceServer()
				.jwt()
					.jwtAuthenticationConverter(getJwtAuthenticationConverter());
		// @formatter:on
	}
	Converter<Jwt, AbstractAuthenticationToken> getJwtAuthenticationConverter() {
		JwtAuthenticationConverter converter = new JwtAuthenticationConverter();
		converter.setJwtGrantedAuthoritiesConverter(jwt ->
				Collections.singletonList(new SimpleGrantedAuthority("message:read"))
		);
		return converter;
	}
}
// Combines HTTP Basic with a JWT resource server and an in-memory user.
@EnableWebSecurity
static class BasicAndResourceServerConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.httpBasic()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	public UserDetailsService userDetailsService() {
		return new InMemoryUserDetailsManager(
				org.springframework.security.core.userdetails.User.withDefaultPasswordEncoder()
						.username("basic-user")
						.password("basic-password")
						.roles("USER")
						.build());
	}
}
// Combines form login with a JWT resource server.
@EnableWebSecurity
static class FormAndResourceServerConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.formLogin()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Intentionally half-configured JWT support (no decoder/jwkSetUri); used to
// assert the "No qualifying bean" wiring error.
@EnableWebSecurity
static class JwtHalfConfiguredConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt(); // missing key configuration, e.g. jwkSetUri
		// @formatter:on
	}
}
// Forces ALWAYS session creation to verify the resource server still works
// with (and does not suppress) eager sessions.
@EnableWebSecurity
static class AlwaysSessionCreationConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.sessionManagement()
				.sessionCreationPolicy(SessionCreationPolicy.ALWAYS)
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Registers a BearerTokenResolver on the DSL that also reads the token from a
// form-encoded request body parameter.
@EnableWebSecurity
static class AllowBearerTokenInRequestBodyConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.bearerTokenResolver(allowRequestBody())
				.jwt();
		// @formatter:on
	}
	private BearerTokenResolver allowRequestBody() {
		DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
		resolver.setAllowFormEncodedBodyParameter(true);
		return resolver;
	}
}
// Exposes a query-parameter-friendly BearerTokenResolver as a bean; the
// configurer should pick it up without explicit DSL wiring.
@EnableWebSecurity
static class AllowBearerTokenAsQueryParameterConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	BearerTokenResolver allowQueryParameter() {
		DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
		resolver.setAllowUriQueryParameter(true);
		return resolver;
	}
}
// Two competing BearerTokenResolver beans; used to assert the configurer
// refuses to guess between them.
@EnableWebSecurity
static class MultipleBearerTokenResolverBeansConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	BearerTokenResolver resolverOne() {
		DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
		resolver.setAllowUriQueryParameter(true);
		return resolver;
	}
	@Bean
	BearerTokenResolver resolverTwo() {
		DefaultBearerTokenResolver resolver = new DefaultBearerTokenResolver();
		resolver.setAllowFormEncodedBodyParameter(true);
		return resolver;
	}
}
// Supplies a mock JwtDecoder through the DSL (classic style).
@EnableWebSecurity
static class CustomJwtDecoderOnDsl extends WebSecurityConfigurerAdapter {
	JwtDecoder decoder = mock(JwtDecoder.class);
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt()
					.decoder(decoder());
		// @formatter:on
	}
	JwtDecoder decoder() {
		return this.decoder;
	}
}
// Supplies a mock JwtDecoder through the lambda DSL.
@EnableWebSecurity
static class CustomJwtDecoderInLambdaOnDsl extends WebSecurityConfigurerAdapter {
	JwtDecoder decoder = mock(JwtDecoder.class);
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests(authorizeRequests ->
				authorizeRequests
					.anyRequest().authenticated()
			)
			.oauth2ResourceServer(oauth2ResourceServer ->
				oauth2ResourceServer
					.jwt(jwt ->
						jwt
							.decoder(decoder())
					)
			);
		// @formatter:on
	}
	JwtDecoder decoder() {
		return this.decoder;
	}
}
// Supplies a mock JwtDecoder as an application bean instead of via the DSL.
@EnableWebSecurity
static class CustomJwtDecoderAsBean extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	public JwtDecoder decoder() {
		return mock(JwtDecoder.class);
	}
}
// Adapts a mock AuthenticationProvider into the JWT DSL's AuthenticationManager
// slot via a method reference.
@EnableWebSecurity
static class JwtAuthenticationManagerConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt()
					.authenticationManager(authenticationProvider()::authenticate);
		// @formatter:on
	}
	@Bean
	public AuthenticationProvider authenticationProvider() {
		return mock(AuthenticationProvider.class);
	}
}
// Installs a mock OAuth2TokenValidator on the injected NimbusJwtDecoder so
// tests can verify it is consulted during decoding.
@EnableWebSecurity
static class CustomJwtValidatorConfig extends WebSecurityConfigurerAdapter {
	@Autowired
	NimbusJwtDecoder jwtDecoder;
	private final OAuth2TokenValidator<Jwt> jwtValidator = mock(OAuth2TokenValidator.class);
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		this.jwtDecoder.setJwtValidator(this.jwtValidator);
		// @formatter:off
		http
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	public OAuth2TokenValidator<Jwt> getJwtValidator() {
		return this.jwtValidator;
	}
}
// Fixes the clock just inside a one-hour skew window, so a token whose exp has
// technically passed still validates.
@EnableWebSecurity
static class UnexpiredJwtClockSkewConfig extends WebSecurityConfigurerAdapter {
	@Autowired
	NimbusJwtDecoder jwtDecoder;
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		Clock nearlyAnHourFromTokenExpiry =
				Clock.fixed(Instant.ofEpochMilli(4687181540000L), ZoneId.systemDefault());
		JwtTimestampValidator jwtValidator = new JwtTimestampValidator(Duration.ofHours(1));
		jwtValidator.setClock(nearlyAnHourFromTokenExpiry);
		this.jwtDecoder.setJwtValidator(jwtValidator);
		// @formatter:off
		http
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Fixes the clock just past the one-hour skew window, so the same token is
// rejected as expired.
@EnableWebSecurity
static class ExpiredJwtClockSkewConfig extends WebSecurityConfigurerAdapter {
	@Autowired
	NimbusJwtDecoder jwtDecoder;
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		Clock justOverOneHourAfterExpiry =
				Clock.fixed(Instant.ofEpochMilli(4687181595000L), ZoneId.systemDefault());
		JwtTimestampValidator jwtValidator = new JwtTimestampValidator(Duration.ofHours(1));
		jwtValidator.setClock(justOverOneHourAfterExpiry);
		this.jwtDecoder.setJwtValidator(jwtValidator);
		// @formatter:off
		http
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
}
// Verifies JWTs with a single hard-coded RSA public key instead of a JWK Set.
@EnableWebSecurity
static class SingleKeyConfig extends WebSecurityConfigurerAdapter {
	// X.509-encoded RSA public key used to build the decoder bean below.
	byte[] spec = Base64.getDecoder().decode(
			"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoXJ8OyOv/eRnce4akdan" +
			"R4KYRfnC2zLV4uYNQpcFn6oHL0dj7D6kxQmsXoYgJV8ZVDn71KGmuLvolxsDncc2" +
			"UrhyMBY6DVQVgMSVYaPCTgW76iYEKGgzTEw5IBRQL9w3SRJWd3VJTZZQjkXef48O" +
			"cz06PGF3lhbz4t5UEZtdF4rIe7u+977QwHuh7yRPBQ3sII+cVoOUMgaXB9SHcGF2" +
			"iZCtPzL/IffDUcfhLQteGebhW8A6eUHgpD5A1PQ+JCw/G7UOzZAjjDjtNM2eqm8j" +
			"+Ms/gqnm4MiCZ4E+9pDN77CAAPVN7kuX6ejs9KBXpk01z48i9fORYk9u7rAkh1Hu" +
			"QwIDAQAB");
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.jwt();
		// @formatter:on
	}
	@Bean
	JwtDecoder decoder() throws Exception {
		RSAPublicKey publicKey = (RSAPublicKey)
				KeyFactory.getInstance("RSA").generatePublic(new X509EncodedKeySpec(this.spec));
		return withPublicKey(publicKey).build();
	}
}
// Baseline opaque-token resource server (classic DSL).
@EnableWebSecurity
static class OpaqueTokenConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.antMatchers("/requires-read-scope").hasAuthority("SCOPE_message:read")
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.opaqueToken();
		// @formatter:on
	}
}
// Lambda-DSL variant of OpaqueTokenConfig.
@EnableWebSecurity
static class OpaqueTokenInLambdaConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests(authorizeRequests ->
				authorizeRequests
					.antMatchers("/requires-read-scope").hasAuthority("SCOPE_message:read")
					.anyRequest().authenticated()
			)
			.oauth2ResourceServer(oauth2ResourceServer ->
				oauth2ResourceServer
					.opaqueToken(withDefaults())
			);
		// @formatter:on
	}
}
// Adapts a mock AuthenticationProvider into the opaque-token DSL's
// AuthenticationManager slot (classic DSL).
@EnableWebSecurity
static class OpaqueTokenAuthenticationManagerConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.opaqueToken()
					.authenticationManager(authenticationProvider()::authenticate);
		// @formatter:on
	}
	@Bean
	public AuthenticationProvider authenticationProvider() {
		return mock(AuthenticationProvider.class);
	}
}
// Lambda-DSL variant of OpaqueTokenAuthenticationManagerConfig.
@EnableWebSecurity
static class OpaqueTokenAuthenticationManagerInLambdaConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests(authorizeRequests ->
				authorizeRequests
					.anyRequest().authenticated()
			)
			.oauth2ResourceServer(oauth2ResourceServer ->
				oauth2ResourceServer
					.opaqueToken(opaqueToken ->
						opaqueToken
							.authenticationManager(authenticationProvider()::authenticate)
					)
			);
		// @formatter:on
	}
	@Bean
	public AuthenticationProvider authenticationProvider() {
		return mock(AuthenticationProvider.class);
	}
}
// Intentionally invalid: configures both JWT and opaque-token support; used to
// assert the "only supports JWTs or Opaque Tokens" wiring error.
@EnableWebSecurity
static class OpaqueAndJwtConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.oauth2ResourceServer()
				.jwt()
					.and()
				.opaqueToken();
		// @formatter:on
	}
}
// Intentionally half-configured opaque-token support (introspection URI but no
// client credentials); used to assert a wiring error.
@EnableWebSecurity
static class OpaqueTokenHalfConfiguredConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.opaqueToken()
					.introspectionUri("https://idp.example.com"); // missing credentials
		// @formatter:on
	}
}
// Intentionally invalid: combines authenticationManagerResolver with
// opaqueToken; used to assert the mutual-exclusion wiring error.
@EnableWebSecurity
static class AuthenticationManagerResolverPlusOtherConfig extends WebSecurityConfigurerAdapter {
	@Override
	protected void configure(HttpSecurity http) throws Exception {
		// @formatter:off
		http
			.authorizeRequests()
				.anyRequest().authenticated()
				.and()
			.oauth2ResourceServer()
				.authenticationManagerResolver(mock(AuthenticationManagerResolver.class))
				.opaqueToken();
		// @formatter:on
	}
}
// Exposes a mock JwtDecoder for tests that only need decoder presence.
@Configuration
static class JwtDecoderConfig {
	@Bean
	public JwtDecoder jwtDecoder() {
		return mock(JwtDecoder.class);
	}
}
// Minimal controller exercised by the MockMvc tests above.
@RestController
static class BasicController {
	@GetMapping("/")
	public String get() {
		return "ok";
	}
	@PostMapping("/post")
	public String post() {
		return "post";
	}
	// Echoes the authenticated principal's name; mapped for GET and POST so
	// both CSRF-protected and unprotected flows can be tested.
	@RequestMapping(value = "/authenticated", method = { GET, POST })
	public String authenticated(@AuthenticationPrincipal Authentication authentication) {
		return authentication.getName();
	}
	// Returns the first granted authority ending in "message:read", or null.
	@GetMapping("/requires-read-scope")
	public String requiresReadScope(@AuthenticationPrincipal JwtAuthenticationToken token) {
		return token.getAuthorities().stream()
				.map(GrantedAuthority::getAuthority)
				.filter(auth -> auth.endsWith("message:read"))
				.findFirst().orElse(null);
	}
	// Method-security variant of /requires-read-scope, guarded by @PreAuthorize.
	@GetMapping("/ms-requires-read-scope")
	@PreAuthorize("hasAuthority('SCOPE_message:read')")
	public String msRequiresReadScope(@AuthenticationPrincipal JwtAuthenticationToken token) {
		return requiresReadScope(token);
	}
	// Always denied via method security; reaching the body indicates a bug.
	@GetMapping("/ms-deny")
	@PreAuthorize("denyAll")
	public String deny() {
		return "hmm, that's odd";
	}
}
// Hosts a MockWebServer and publishes its URL under "mockwebserver.url" so
// @Value-injected configs (e.g. JwkSetUriConfig) can reach it.
@Configuration
static class WebServerConfig implements BeanPostProcessor, EnvironmentAware {
	private final MockWebServer server = new MockWebServer();
	@PreDestroy
	public void shutdown() throws IOException {
		this.server.shutdown();
	}
	@Override
	public void setEnvironment(Environment environment) {
		if (environment instanceof ConfigurableEnvironment) {
			((ConfigurableEnvironment) environment)
					.getPropertySources().addFirst(new MockWebServerPropertySource());
		}
	}
	@Bean
	public MockWebServer web() {
		return this.server;
	}
	// Resolves "mockwebserver.url" to the live server's JWK Set endpoint.
	private class MockWebServerPropertySource extends PropertySource {
		MockWebServerPropertySource() {
			super("mockwebserver");
		}
		@Override
		public Object getProperty(String name) {
			if ("mockwebserver.url".equals(name)) {
				return WebServerConfig.this.server.url("/.well-known/jwks.json").toString();
			} else {
				return null;
			}
		}
	}
}
// Provides a mock RestOperations plus decoder/introspector beans built on it,
// so HTTP interactions can be stubbed via mockRestOperations().
@Configuration
static class RestOperationsConfig {
	RestOperations rest = mock(RestOperations.class);
	@Bean
	RestOperations rest() {
		return this.rest;
	}
	@Bean
	NimbusJwtDecoder jwtDecoder() {
		return withJwkSetUri("https://example.org/.well-known/jwks.json")
				.restOperations(this.rest).build();
	}
	@Bean
	NimbusOpaqueTokenIntrospector tokenIntrospectionClient() {
		return new NimbusOpaqueTokenIntrospector("https://example.org/introspect", this.rest);
	}
}
// Registers a Mockito mock of the given class as a named bean in the context.
private static <T> void registerMockBean(GenericApplicationContext context, String name, Class<T> clazz) {
	context.registerBean(name, clazz, () -> mock(clazz));
}
// MockMvc post-processor that attaches a bearer token either as an
// Authorization header (default) or as an "access_token" request parameter.
private static class BearerTokenRequestPostProcessor implements RequestPostProcessor {
	private boolean asRequestParameter;
	private String token;
	BearerTokenRequestPostProcessor(String token) {
		this.token = token;
	}
	// Switches delivery to the "access_token" request parameter.
	public BearerTokenRequestPostProcessor asParam() {
		this.asRequestParameter = true;
		return this;
	}
	@Override
	public MockHttpServletRequest postProcessRequest(MockHttpServletRequest request) {
		if (this.asRequestParameter) {
			request.setParameter("access_token", this.token);
		} else {
			request.addHeader("Authorization", "Bearer " + this.token);
		}
		return request;
	}
}
// Convenience factory for the post-processor above.
private static BearerTokenRequestPostProcessor bearerToken(String token) {
	return new BearerTokenRequestPostProcessor(token);
}
// Matches a WWW-Authenticate header describing an RFC 6750 invalid_request
// error that contains the given description fragment.
private static ResultMatcher invalidRequestHeader(String message) {
	return header().string(HttpHeaders.WWW_AUTHENTICATE,
			AllOf.allOf(
					new StringStartsWith("Bearer " +
							"error=\"invalid_request\", " +
							"error_description=\""),
					new StringContains(message),
					new StringEndsWith(", " +
							"error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"")
			)
	);
}
// Matches a WWW-Authenticate header describing an RFC 6750 invalid_token
// error that contains the given description fragment.
private static ResultMatcher invalidTokenHeader(String message) {
	return header().string(HttpHeaders.WWW_AUTHENTICATE,
			AllOf.allOf(
					new StringStartsWith("Bearer " +
							"error=\"invalid_token\", " +
							"error_description=\""),
					new StringContains(message),
					new StringEndsWith(", " +
							"error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"")
			)
	);
}
// Matches the exact WWW-Authenticate header emitted for insufficient scope.
private static ResultMatcher insufficientScopeHeader() {
	return header().string(HttpHeaders.WWW_AUTHENTICATE, "Bearer " +
			"error=\"insufficient_scope\"" +
			", error_description=\"The request requires higher privileges than provided by the access token.\"" +
			", error_uri=\"https://tools.ietf.org/html/rfc6750#section-3.1\"");
}
// Enqueues a 200 JSON response on the MockWebServer.
private void mockWebServer(String response) {
	this.web.enqueue(new MockResponse()
			.setResponseCode(200)
			.setHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
			.setBody(response));
}
// Stubs the shared mock RestOperations to return the given JSON body for any
// String-typed exchange (e.g. a JWK Set fetch).
private void mockRestOperations(String response) {
	RestOperations rest = this.spring.getContext().getBean(RestOperations.class);
	HttpHeaders headers = new HttpHeaders();
	headers.setContentType(MediaType.APPLICATION_JSON);
	ResponseEntity<String> entity = new ResponseEntity<>(response, headers, HttpStatus.OK);
	when(rest.exchange(any(RequestEntity.class), eq(String.class)))
			.thenReturn(entity);
}
// Looks up a bean of the given type from the test application context.
private <T> T bean(Class<T> beanClass) {
	return this.spring.getContext().getBean(beanClass);
}
// Wraps a context bean in Mockito's verify() for interaction assertions.
private <T> T verifyBean(Class<T> beanClass) {
	return verify(this.spring.getContext().getBean(beanClass));
}
// Loads a test fixture named "<TestClass>-<name>.json" from the classpath.
private String json(String name) throws IOException {
	return resource(name + ".json");
}
// Loads a JWK Set fixture named "<TestClass>-<name>.jwks".
private String jwks(String name) throws IOException {
	return resource(name + ".jwks");
}
// Loads a serialized token fixture named "<TestClass>-<name>.token".
private String token(String name) throws IOException {
	return resource(name + ".token");
}
/**
 * Loads a classpath fixture named {@code <TestClass>-<suffix>} relative to this
 * class and returns its contents with line separators removed.
 *
 * <p>Reads via {@link ClassPathResource#getInputStream()} with an explicit
 * UTF-8 charset: the previous {@code FileReader}-based version used the
 * platform default charset and failed when the fixture lived inside a JAR.
 *
 * @param suffix fixture file suffix, including its extension
 * @return the fixture contents, lines joined without separators
 * @throws IOException if the resource cannot be read
 */
private String resource(String suffix) throws IOException {
	String name = this.getClass().getSimpleName() + "-" + suffix;
	ClassPathResource resource = new ClassPathResource(name, this.getClass());
	try ( BufferedReader reader = new BufferedReader(new java.io.InputStreamReader(
			resource.getInputStream(), java.nio.charset.StandardCharsets.UTF_8)) ) {
		return reader.lines().collect(Collectors.joining());
	}
}
}
|
Polish OAuth2ResourceServerConfigurerTests
To confirm that resource server only produces SCOPE_<scope>
authorities by default.
Issue gh-7596
|
config/src/test/java/org/springframework/security/config/annotation/web/configurers/oauth2/server/resource/OAuth2ResourceServerConfigurerTests.java
|
Polish OAuth2ResourceServerConfigurerTests
|
|
Java
|
bsd-2-clause
|
296eb4bea99ab26c5adc3185ad8a670300a2186b
| 0
|
imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy,imagej/imagej-legacy
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2014 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.legacy;
import ij.ImagePlus;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Date;
import java.util.List;
import net.imagej.display.ImageDisplay;
import net.imagej.legacy.plugin.LegacyAppConfiguration;
import net.imagej.legacy.plugin.LegacyEditor;
import net.imagej.legacy.plugin.LegacyOpener;
import net.imagej.legacy.plugin.LegacyPostRefreshMenus;
import net.imagej.patcher.EssentialLegacyHooks;
import net.imagej.patcher.LegacyHooks;
import org.scijava.Context;
import org.scijava.log.LogService;
import org.scijava.log.StderrLogService;
import org.scijava.plugin.PluginInfo;
import org.scijava.plugin.PluginService;
import org.scijava.plugin.SciJavaPlugin;
import org.scijava.util.ListUtils;
/**
* The {@link LegacyHooks} encapsulating an active {@link LegacyService} for use within the patched ImageJ 1.x.
*
* @author Johannes Schindelin
*/
public class DefaultLegacyHooks extends EssentialLegacyHooks {
// Bridge to the ImageJ2 side; all hook callbacks ultimately delegate here.
private LegacyService legacyService;
// SciJava application context; resolved lazily in installed().
private Context context;
// Used to discover SciJava plugins implementing legacy extension points.
private PluginService pluginService;
// Logger; falls back to stderr when no LogService is available.
private LogService log;
// Helper for interacting with ImageJ 1.x internals (may be null).
private IJ1Helper helper;
/**
 * Creates hooks that delegate to the given legacy service.
 *
 * @param legacyService the active legacy service to bridge into ImageJ 1.x
 * @param helper helper used to interact with ImageJ 1.x internals
 */
public DefaultLegacyHooks(LegacyService legacyService, IJ1Helper helper) {
	this.legacyService = legacyService;
	this.helper = helper;
}
/** {@inheritDoc} */
@Override
public boolean isLegacyMode() {
	return legacyService.isLegacyMode();
}
/** {@inheritDoc} */
@Override
public Object getContext() {
	return legacyService.getContext();
}
/** {@inheritDoc} */
@Override
public boolean quit() {
	// Release resources first; the return value tells ImageJ 1.x whether it
	// should proceed with its own shutdown (only in legacy mode).
	dispose();
	return isLegacyMode();
}
// First discovered editor plugin, or null if none is installed.
private LegacyEditor editor;
// First discovered app-configuration plugin (name/icon overrides), or null.
private LegacyAppConfiguration appConfig;
// Plugins to run after ImageJ 1.x refreshes its menus.
private List<LegacyPostRefreshMenus> afterRefreshMenus;
// Plugins that may intercept legacy file-open requests.
private List<LegacyOpener> legacyOpeners;
/** {@inheritDoc} */
@Override
public synchronized void installed() {
	// Resolve services and discover extension-point plugins now that the
	// hooks are active inside the patched ImageJ 1.x.
	context = legacyService.getContext();
	IJ1Helper.subscribeEvents(context);
	pluginService = context.getService(PluginService.class);
	log = context.getService(LogService.class);
	if (log == null) log = new StderrLogService();
	editor = createInstanceOfType(LegacyEditor.class);
	appConfig = createInstanceOfType(LegacyAppConfiguration.class);
	// TODO: inject context automatically?
	afterRefreshMenus = pluginService.createInstancesOfType(LegacyPostRefreshMenus.class);
	for (final LegacyPostRefreshMenus o : afterRefreshMenus) {
		context.inject(o);
	}
	legacyOpeners = pluginService.createInstancesOfType(LegacyOpener.class);
	for (final LegacyOpener o : legacyOpeners) {
		context.inject(o);
	}
}
// TODO: move to scijava-common?
/**
 * Instantiates the highest-priority plugin of the given type, if any.
 *
 * @param type the SciJava plugin type to look up
 * @return a new instance of the first matching plugin, or null if the plugin
 *         service is unavailable or no plugin of that type exists
 */
private<PT extends SciJavaPlugin> PT createInstanceOfType(final Class<PT> type) {
	if (pluginService == null) return null;
	PluginInfo<PT> info = ListUtils.first(pluginService.getPluginsOfType(type));
	return info == null ? null : pluginService.createInstance(info);
}
/** {@inheritDoc} */
@Override
public void dispose() {
	// Unsubscribe from ImageJ 1.x events; passing null detaches the context.
	IJ1Helper.subscribeEvents(null);
	// TODO: if there are still things open, we should object.
}
/** {@inheritDoc} */
@Override
public Object interceptRunPlugIn(String className, String arg) {
	// Expose the legacy service and its context to legacy code that "runs"
	// them as plugins by class name.
	if (LegacyService.class.getName().equals(className))
		return legacyService;
	if (Context.class.getName().equals(className))
		return legacyService == null ? null : legacyService.getContext();
	// Intercept IJ1 commands
	if (helper != null) {
		// intercept ij.plugins.Commands
		if (helper.commandsName().equals(className)) {
			if (arg.equals("open")) {
				return interceptOpen(null, -1, true);
			}
		}
	}
	// null means: not intercepted, let ImageJ 1.x run the plugin itself.
	return null;
}
/** Resolution to use when converting double progress to int ratio. */
private static final int PROGRESS_GRANULARITY = 1000;
/** {@inheritDoc} */
@Override
public void showProgress(double progress) {
	// Scale the fractional progress onto a fixed-resolution integer ratio.
	final int currentIndex = (int) (PROGRESS_GRANULARITY * progress);
	final int finalIndex = PROGRESS_GRANULARITY;
	showProgress(currentIndex, finalIndex);
}
/** {@inheritDoc} */
@Override
public void showProgress(int currentIndex, int finalIndex) {
	// Forward to the ImageJ2 status service unless running in pure legacy mode.
	if (!isLegacyMode()) {
		legacyService.status().showProgress(currentIndex, finalIndex);
	}
}
/** {@inheritDoc} */
@Override
public void showStatus(final String status) {
	// Ignore status messages issued before the legacy service finished
	// initializing, and in pure legacy mode.
	if (!isInitialized()) {
		return;
	}
	if (!isLegacyMode()) {
		legacyService.status().showStatus(status);
	}
}
/** {@inheritDoc} */
@Override
public void registerImage(final Object o) {
	final ImagePlus image = (ImagePlus) o;
	// Only register fully-formed, displayed images.
	if (image == null) return;
	if (!image.isProcessor()) return;
	if (image.getWindow() == null) return;
	if (!isLegacyMode()) {
		// Outside legacy mode, only track images created on legacy threads.
		if (!Utils.isLegacyThread(Thread.currentThread())) return;
		legacyService.log().debug("register legacy image: " + image);
	}
	try {
		legacyService.getImageMap().registerLegacyImage(image);
	} catch (UnsupportedOperationException e) {
		// ignore: the dummy legacy service does not have an image map
	}
}
/** {@inheritDoc} */
@Override
public void unregisterImage(final Object o) {
	final ImagePlus image = (ImagePlus) o;
	if (isLegacyMode()) return;
	if (image == null) return;
	if (!Utils.isLegacyThread(Thread.currentThread())) return;
	legacyService.log().debug("ImagePlus.hide(): " + image);
	LegacyOutputTracker.removeOutput(image);
	try {
		// Close the linked ImageJ2 display if one exists; otherwise just drop
		// the mapping for this legacy image.
		ImageDisplay disp = legacyService.getImageMap().lookupDisplay(image);
		if (disp == null) {
			legacyService.getImageMap().unregisterLegacyImage(image);
		}
		else {
			disp.close();
		}
	} catch (UnsupportedOperationException e) {
		// ignore: the dummy legacy service does not have an image map
	}
	// end alternate
}
/** {@inheritDoc} */
@Override
public void debug(String string) {
	legacyService.log().debug(string);
}
/** {@inheritDoc} */
@Override
public void error(Throwable t) {
	legacyService.log().error(t);
}
// Whether the legacy service has completed its initialization.
private boolean isInitialized() {
	return legacyService.isInitialized();
}
// if the ij.log.file property is set, log every message to the file pointed to
private BufferedWriter logFileWriter;
/**
 * Appends the given ImageJ 1.x log message to the file named by the
 * {@code ij.log.file} system property, if set. The writer is opened lazily on
 * first use (UTF-8, append mode) and kept open across calls; on any write
 * failure the property is cleared so logging is not retried.
 *
 * @param message the log message; ignored if null
 */
@Override
public void log(String message) {
	if (message != null) {
		String logFilePath = System.getProperty("ij.log.file");
		if (logFilePath != null) {
			try {
				if (logFileWriter == null) {
					java.io.OutputStream out = new java.io.FileOutputStream(
							logFilePath, true);
					java.io.Writer writer = new java.io.OutputStreamWriter(
							out, "UTF-8");
					logFileWriter = new java.io.BufferedWriter(writer);
					logFileWriter.write("Started new log on " + new Date() + "\n");
				}
				logFileWriter.write(message);
				if (!message.endsWith("\n"))
					logFileWriter.newLine();
				logFileWriter.flush();
			} catch (Throwable t) {
				// Best-effort logging: report the failure and disable file
				// logging rather than propagating into ImageJ 1.x.
				t.printStackTrace();
				System.getProperties().remove("ij.log.file");
				logFileWriter = null;
			}
		}
	}
}
/**
 * Returns the application name for use with ImageJ 1.x.
 *
 * @return the name from the installed {@code LegacyAppConfiguration}
 *         plugin, or {@code "ImageJ (legacy)"} if none is installed
 */
@Override
public String getAppName() {
	if (appConfig == null) {
		return "ImageJ (legacy)";
	}
	return appConfig.getAppName();
}
/**
 * Returns the icon for use with ImageJ 1.x.
 *
 * @return the URL of the application icon (falls back to the bundled
 *         {@code /icons/imagej-256.png} when no app configuration exists)
 */
@Override
public URL getIconURL() {
return appConfig == null ? getClass().getResource("/icons/imagej-256.png") : appConfig.getIconURL();
}
/**
 * {@inheritDoc}
 *
 * Invokes every registered post-refresh-menus callback in order. This is
 * a no-op before {@code installed()} has populated the callback list.
 */
@Override
public void runAfterRefreshMenus() {
	final List<LegacyPostRefreshMenus> callbacks = afterRefreshMenus;
	if (callbacks == null) {
		return;
	}
	for (final Runnable callback : callbacks) {
		callback.run();
	}
}
/**
 * Opens the given path in the registered legacy editor, if any.
 *
 * @param path the path of the file to open
 * @return whether the file was opened successfully
 */
@Override
public boolean openInEditor(final String path) {
if (editor == null) return false;
// URLs (anything containing "://") are not handled by the editor
if (path.indexOf("://") > 0) return false;
// if it has no extension, do not open it in the legacy editor
if (!path.matches(".*\\.[0-9A-Za-z]{1,4}")) return false;
// guard against infinite recursion should the editor call back into here
if (stackTraceContains(getClass().getName() + ".openInEditor(")) return false;
final File file = new File(path);
if (!file.exists()) return false;
if (isBinaryFile(file)) return false;
return editor.open(file);
}
/**
 * Creates the given file in the registered legacy editor, if any.
 *
 * @param title the title of the file to create
 * @param content the text of the file to be created
 * @return whether the file was opened successfully
 */
@Override
public boolean createInEditor(final String title, final String content) {
if (editor == null) return false;
return editor.create(title, content);
}
/**
 * Determines whether a file is binary or text.
 *
 * This just checks for a NUL in the first 1024 bytes.
 * Not the best test, but a pragmatic one.
 *
 * @param file the file to test
 * @return whether it appears binary; {@code false} is also returned when
 *         the file cannot be read
 */
private static boolean isBinaryFile(final File file) {
	// try-with-resources closes the stream even if read() throws
	// (the original leaked the stream on an I/O error mid-read)
	try (InputStream in = new FileInputStream(file)) {
		byte[] buffer = new byte[1024];
		int offset = 0;
		// fill the buffer as far as possible; read() may return short counts
		while (offset < buffer.length) {
			int count = in.read(buffer, offset, buffer.length - offset);
			if (count < 0) break; // EOF
			offset += count;
		}
		// a NUL byte anywhere in the sampled prefix marks the file as binary
		while (offset > 0) {
			if (buffer[--offset] == 0) {
				return true;
			}
		}
	} catch (IOException e) {
		// fall through: unreadable files are treated as text
	}
	return false;
}
/**
 * Determines whether the current stack trace contains the specified string,
 * ignoring the frames for {@code getStackTrace()}, this method and its
 * direct caller.
 *
 * @param needle the text to find
 * @return whether the stack trace contains the text
 */
private static boolean stackTraceContains(String needle) {
	final StackTraceElement[] frames = Thread.currentThread().getStackTrace();
	// start at index 3: skip getStackTrace(), this method, and the caller
	int index = 3;
	while (index < frames.length) {
		if (frames[index++].toString().contains(needle)) {
			return true;
		}
	}
	return false;
}
/**
 * {@inheritDoc}
 *
 * Gives each registered {@link LegacyOpener} a chance to handle the open
 * request; the first non-null result wins.
 */
@Override
public Object interceptOpen(final String path, final int planeIndex,
final boolean display)
{
for (final LegacyOpener opener : legacyOpeners) {
final Object result = opener.open(path, planeIndex, display);
if (result != null) return result;
}
return null;
}
}
|
src/main/java/net/imagej/legacy/DefaultLegacyHooks.java
|
/*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2009 - 2014 Board of Regents of the University of
* Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
* Institute of Molecular Cell Biology and Genetics.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.legacy;
import ij.ImagePlus;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.Date;
import java.util.List;
import net.imagej.display.ImageDisplay;
import net.imagej.legacy.plugin.LegacyAppConfiguration;
import net.imagej.legacy.plugin.LegacyEditor;
import net.imagej.legacy.plugin.LegacyOpener;
import net.imagej.legacy.plugin.LegacyPostRefreshMenus;
import net.imagej.patcher.EssentialLegacyHooks;
import net.imagej.patcher.LegacyHooks;
import org.scijava.Context;
import org.scijava.log.LogService;
import org.scijava.log.StderrLogService;
import org.scijava.plugin.PluginInfo;
import org.scijava.plugin.PluginService;
import org.scijava.plugin.SciJavaPlugin;
import org.scijava.util.ListUtils;
/**
* The {@link LegacyHooks} encapsulating an active {@link LegacyService} for use within the patched ImageJ 1.x.
*
* @author Johannes Schindelin
*/
public class DefaultLegacyHooks extends EssentialLegacyHooks {
// the bridge service between patched ImageJ 1.x and the SciJava context
private LegacyService legacyService;
private Context context;
private PluginService pluginService;
private LogService log;
private IJ1Helper helper;
/**
 * Constructs hooks backed by the given legacy service and IJ1 helper.
 *
 * @param legacyService the service bridging ImageJ 1.x and ImageJ2
 * @param helper helper for invoking ImageJ 1.x internals
 */
public DefaultLegacyHooks(LegacyService legacyService, IJ1Helper helper) {
this.legacyService = legacyService;
this.helper = helper;
}
/** {@inheritDoc} */
@Override
public boolean isLegacyMode() {
return legacyService.isLegacyMode();
}
/** {@inheritDoc} */
@Override
public Object getContext() {
return legacyService.getContext();
}
/** {@inheritDoc} */
@Override
public boolean quit() {
dispose();
return isLegacyMode();
}
// optional single-instance plugins and callback lists, resolved in installed()
private LegacyEditor editor;
private LegacyAppConfiguration appConfig;
private List<LegacyPostRefreshMenus> afterRefreshMenus;
private List<LegacyOpener> legacyOpeners;
/**
 * {@inheritDoc}
 *
 * Resolves the SciJava services and the optional legacy plugins
 * (editor, app configuration, menu callbacks, openers) from the context.
 */
@Override
public synchronized void installed() {
context = legacyService.getContext();
IJ1Helper.subscribeEvents(context);
pluginService = context.getService(PluginService.class);
log = context.getService(LogService.class);
// fall back to stderr logging when no LogService is available
if (log == null) log = new StderrLogService();
editor = createInstanceOfType(LegacyEditor.class);
appConfig = createInstanceOfType(LegacyAppConfiguration.class);
// TODO: inject context automatically?
afterRefreshMenus = pluginService.createInstancesOfType(LegacyPostRefreshMenus.class);
for (final LegacyPostRefreshMenus o : afterRefreshMenus) {
context.inject(o);
}
legacyOpeners = pluginService.createInstancesOfType(LegacyOpener.class);
for (final LegacyOpener o : legacyOpeners) {
context.inject(o);
}
}
// TODO: move to scijava-common?
/** Instantiates the first discovered plugin of the given type, or null if none. */
private<PT extends SciJavaPlugin> PT createInstanceOfType(final Class<PT> type) {
if (pluginService == null) return null;
PluginInfo<PT> info = ListUtils.first(pluginService.getPluginsOfType(type));
return info == null ? null : pluginService.createInstance(info);
}
/** {@inheritDoc} */
@Override
public void dispose() {
IJ1Helper.subscribeEvents(null);
// TODO: if there are still things open, we should object.
}
/**
 * {@inheritDoc}
 *
 * Lets {@code IJ.runPlugIn} hand back the legacy service or its context
 * instead of instantiating them as ImageJ 1.x plugins.
 */
@Override
public Object interceptRunPlugIn(String className, String arg) {
if (LegacyService.class.getName().equals(className))
return legacyService;
if (Context.class.getName().equals(className))
return legacyService == null ? null : legacyService.getContext();
return null;
}
/** Resolution to use when converting double progress to int ratio. */
private static final int PROGRESS_GRANULARITY = 1000;
/** {@inheritDoc} */
@Override
public void showProgress(double progress) {
final int currentIndex = (int) (PROGRESS_GRANULARITY * progress);
final int finalIndex = PROGRESS_GRANULARITY;
showProgress(currentIndex, finalIndex);
}
/** {@inheritDoc} */
@Override
public void showProgress(int currentIndex, int finalIndex) {
if (!isLegacyMode()) {
legacyService.status().showProgress(currentIndex, finalIndex);
}
}
/** {@inheritDoc} */
@Override
public void showStatus(final String status) {
// drop status updates that arrive before the service is ready
if (!isInitialized()) {
return;
}
if (!isLegacyMode()) {
legacyService.status().showStatus(status);
}
}
/**
 * {@inheritDoc}
 *
 * Registers a displayed {@link ImagePlus} with the legacy image map.
 */
@Override
public void registerImage(final Object o) {
final ImagePlus image = (ImagePlus) o;
if (image == null) return;
// only track images that actually carry pixel data and a window
if (!image.isProcessor()) return;
if (image.getWindow() == null) return;
if (!isLegacyMode()) {
if (!Utils.isLegacyThread(Thread.currentThread())) return;
legacyService.log().debug("register legacy image: " + image);
}
try {
legacyService.getImageMap().registerLegacyImage(image);
} catch (UnsupportedOperationException e) {
// ignore: the dummy legacy service does not have an image map
}
}
/**
 * {@inheritDoc}
 *
 * Unregisters an {@link ImagePlus} from the legacy image map when it is
 * hidden, closing its modern display if one exists.
 */
@Override
public void unregisterImage(final Object o) {
final ImagePlus image = (ImagePlus) o;
if (isLegacyMode()) return;
if (image == null) return;
if (!Utils.isLegacyThread(Thread.currentThread())) return;
legacyService.log().debug("ImagePlus.hide(): " + image);
LegacyOutputTracker.removeOutput(image);
try {
ImageDisplay disp = legacyService.getImageMap().lookupDisplay(image);
if (disp == null) {
// no modern display linked: just drop the mapping
legacyService.getImageMap().unregisterLegacyImage(image);
}
else {
disp.close();
}
} catch (UnsupportedOperationException e) {
// ignore: the dummy legacy service does not have an image map
}
// end alternate
}
/** {@inheritDoc} */
@Override
public void debug(String string) {
legacyService.log().debug(string);
}
/** {@inheritDoc} */
@Override
public void error(Throwable t) {
legacyService.log().error(t);
}
/** Reports whether the linked legacy service has finished initializing. */
private boolean isInitialized() {
return legacyService.isInitialized();
}
// if the ij.log.file property is set, log every message to the file pointed to
private BufferedWriter logFileWriter;
/**
 * {@inheritDoc}
 *
 * Appends the message to the file named by the {@code ij.log.file} system
 * property, if set; the writer is opened lazily and kept open for
 * appending. On any failure the property is cleared so logging stops.
 */
@Override
public void log(String message) {
if (message != null) {
String logFilePath = System.getProperty("ij.log.file");
if (logFilePath != null) {
try {
if (logFileWriter == null) {
java.io.OutputStream out = new java.io.FileOutputStream(
logFilePath, true);
java.io.Writer writer = new java.io.OutputStreamWriter(
out, "UTF-8");
logFileWriter = new java.io.BufferedWriter(writer);
logFileWriter.write("Started new log on " + new Date() + "\n");
}
logFileWriter.write(message);
if (!message.endsWith("\n"))
logFileWriter.newLine();
logFileWriter.flush();
} catch (Throwable t) {
t.printStackTrace();
System.getProperties().remove("ij.log.file");
logFileWriter = null;
}
}
}
}
/**
 * Returns the application name for use with ImageJ 1.x.
 *
 * @return the application name
 */
@Override
public String getAppName() {
return appConfig == null ? "ImageJ (legacy)" : appConfig.getAppName();
}
/**
 * Returns the icon for use with ImageJ 1.x.
 *
 * @return the URL of the application icon
 */
@Override
public URL getIconURL() {
return appConfig == null ? getClass().getResource("/icons/imagej-256.png") : appConfig.getIconURL();
}
/** {@inheritDoc} */
@Override
public void runAfterRefreshMenus() {
if (afterRefreshMenus != null) {
for (final Runnable run : afterRefreshMenus) {
run.run();
}
}
}
/**
 * Opens the given path in the registered legacy editor, if any.
 *
 * @param path the path of the file to open
 * @return whether the file was opened successfully
 */
@Override
public boolean openInEditor(final String path) {
if (editor == null) return false;
// URLs are not handled by the editor
if (path.indexOf("://") > 0) return false;
// if it has no extension, do not open it in the legacy editor
if (!path.matches(".*\\.[0-9A-Za-z]{1,4}")) return false;
// guard against infinite recursion should the editor call back into here
if (stackTraceContains(getClass().getName() + ".openInEditor(")) return false;
final File file = new File(path);
if (!file.exists()) return false;
if (isBinaryFile(file)) return false;
return editor.open(file);
}
/**
 * Creates the given file in the registered legacy editor, if any.
 *
 * @param title the title of the file to create
 * @param content the text of the file to be created
 * @return whether the file was opened successfully
 */
@Override
public boolean createInEditor(final String title, final String content) {
if (editor == null) return false;
return editor.create(title, content);
}
/**
 * Determines whether a file is binary or text.
 *
 * This just checks for a NUL in the first 1024 bytes.
 * Not the best test, but a pragmatic one.
 *
 * @param file the file to test
 * @return whether it is binary
 */
private static boolean isBinaryFile(final File file) {
try {
InputStream in = new FileInputStream(file);
byte[] buffer = new byte[1024];
int offset = 0;
// fill the buffer as far as possible; read() may return short counts
while (offset < buffer.length) {
int count = in.read(buffer, offset, buffer.length - offset);
if (count < 0) break;
offset += count;
}
in.close();
// a NUL byte anywhere in the sampled prefix marks the file as binary
while (offset > 0) {
if (buffer[--offset] == 0) {
return true;
}
}
} catch (IOException e) {
// treat unreadable files as text
}
return false;
}
/**
 * Determines whether the current stack trace contains the specified string.
 *
 * @param needle the text to find
 * @return whether the stack trace contains the text
 */
private static boolean stackTraceContains(String needle) {
final StackTraceElement[] trace = Thread.currentThread().getStackTrace();
// exclude elements up to, and including, the caller
for (int i = 3; i < trace.length; i++) {
if (trace[i].toString().contains(needle)) return true;
}
return false;
}
/**
 * {@inheritDoc}
 *
 * Gives each registered {@link LegacyOpener} a chance to handle the open
 * request; the first non-null result wins.
 */
@Override
public Object interceptOpen(final String path, final int planeIndex,
final boolean display)
{
for (final LegacyOpener opener : legacyOpeners) {
final Object result = opener.open(path, planeIndex, display);
if (result != null) return result;
}
return null;
}
}
|
DefaultLegacyHooks: intercept open command
If the ij.plugin.Commands open method is executed, we will now intercept
and delegate to the interceptOpen method, allowing LegacyOpeners to be
used.
|
src/main/java/net/imagej/legacy/DefaultLegacyHooks.java
|
DefaultLegacyHooks: intercept open command
|
|
Java
|
bsd-3-clause
|
624b1af7926171fb4c95bf9ee1aece44b6cd2f72
| 0
|
NCIP/psc,NCIP/psc,NCIP/psc,NCIP/psc
|
package edu.northwestern.bioinformatics.studycalendar.utils.hibernate;
import edu.nwu.bioinformatics.commons.ComparisonUtils;
import org.hibernate.usertype.UserType;
import org.hibernate.usertype.ParameterizedType;
import org.hibernate.HibernateException;
import org.apache.commons.logging.Log;
import java.util.Properties;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.PreparedStatement;
import java.sql.Types;
import java.io.Serializable;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarError;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarSystemException;
/**
 * A Hibernate UserType for subclasses of {@link edu.northwestern.bioinformatics.studycalendar.domain.AbstractControlledVocabularyObject}.
 * Required parameter:
 * <dl>
 * <dt><code>enumClass</code></dt>
 * <dd>Typesafe enumeration class of which this type instance will load instances</dd>
 * </dl>
 * Optional parameters:
 * <dl>
 * <dt><code>factoryMethod</code></dt>
 * <dd>The public static method to call to obtain an instance of the class from a database key.
 * Default is <kbd>getById</kbd>.</dd>
 * <dt><code>keyMethod</code></dt>
 * <dd>The public method to call on an instance of the class to get the database key under
 * which it should be stored. Default is <kbd>getId</kbd>.</dd>
 * </dl>
 *
 * @author Rhett Sutphin
 */
public class ControlledVocabularyObjectType implements UserType, ParameterizedType {
    protected static final String ENUM_CLASS_PARAM_KEY = "enumClass";
    protected static final String FACTORY_METHOD_PARAM_KEY = "factoryMethod";
    protected static final String KEY_METHOD_PARAM_KEY = "keyMethod";
    private static final String DEFAULT_FACTORY_METHOD_NAME = "getById";
    private static final String DEFAULT_KEY_METHOD_NAME = "getId";
    private static final Class[] NO_PARAMS = new Class[0];
    private Log log = HibernateTypeUtils.getLog(getClass());
    private Properties parameterValues;

    /** Returns the merged (defaults plus user-supplied) type parameters. */
    protected final Properties getParameterValues() {
        return parameterValues;
    }

    ////// IMPLEMENTATION of ParameterizedType

    public void setParameterValues(Properties parameters) {
        this.parameterValues = new Properties(createDefaults());
        if (parameters != null) {
            this.parameterValues.putAll(parameters);
        }
        // call various methods so that they have an opportunity to fail during initialization
        getEnumClass();
        getFactoryMethod();
        getKeyMethod();
    }

    /** Builds the default values for the optional parameters. */
    private Properties createDefaults() {
        Properties defaults = new Properties();
        defaults.put(FACTORY_METHOD_PARAM_KEY, DEFAULT_FACTORY_METHOD_NAME);
        defaults.put(KEY_METHOD_PARAM_KEY, DEFAULT_KEY_METHOD_NAME);
        return defaults;
    }

    ////// IMPLEMENTATION OF UserType

    public final int[] sqlTypes() {
        return new int[] { Types.INTEGER };
    }

    public Class returnedClass() {
        return getEnumClass();
    }

    /** Resolves the configured enumClass, failing loudly if absent or unloadable. */
    private Class getEnumClass() {
        if (getEnumClassName() == null) {
            throw new StudyCalendarError("required enumClass parameter not specified");
        }
        try {
            return Class.forName(getEnumClassName());
        } catch (ClassNotFoundException e) {
            throw new StudyCalendarError("enumClass " + getEnumClassName() + " does not exist", e);
        }
    }

    private String getEnumClassName() {
        return getParameterValues().getProperty(ENUM_CLASS_PARAM_KEY);
    }

    /** The static factory method taking an int key, e.g. {@code getById(int)}. */
    private Method getFactoryMethod() {
        return getParameterNamedMethod(FACTORY_METHOD_PARAM_KEY, new Class[] { Integer.TYPE });
    }

    /** The no-arg instance method yielding the database key, e.g. {@code getId()}. */
    private Method getKeyMethod() {
        return getParameterNamedMethod(KEY_METHOD_PARAM_KEY, NO_PARAMS);
    }

    private Method getParameterNamedMethod(String paramKey, Class[] parameterTypes) {
        String methodName = getParameterValues().getProperty(paramKey);
        try {
            return getEnumClass().getMethod(methodName, parameterTypes);
        } catch (NoSuchMethodException e) {
            throw new StudyCalendarError("enumClass " + getEnumClassName()
                + " has no method named " + methodName, e);
        }
    }

    /**
     * Reads the raw database key for one column. Loads explicitly as an int,
     * but preserves SQL NULL: {@link ResultSet#getInt} returns 0 for NULL, so
     * {@link ResultSet#wasNull} must be consulted — otherwise the null guard
     * in {@link #nullSafeGet} is dead code and a NULL column would trigger a
     * bogus {@code getById(0)} lookup.
     */
    protected Object getKeyObject(ResultSet rs, String colname) throws SQLException {
        int key = rs.getInt(colname);
        return rs.wasNull() ? null : key;
    }

    public Object nullSafeGet(ResultSet rs, String[] names, Object owner) throws HibernateException, SQLException {
        Object key = getKeyObject(rs, names[0]);
        Object value = null;
        if (key != null) {
            Method factoryMethod = getFactoryMethod();
            try {
                value = factoryMethod.invoke(null, key);
            } catch (IllegalArgumentException iae) {
                throw new StudyCalendarSystemException("Invocation of " + factoryMethod
                    + " with key=" + key + " (" + key.getClass().getName() + ") failed", iae);
            } catch (IllegalAccessException e) {
                throw new StudyCalendarSystemException("Cannot access factoryMethod " + factoryMethod, e);
            } catch (InvocationTargetException e) {
                throw new StudyCalendarSystemException("Invocation of " + factoryMethod + " failed", e);
            }
        }
        HibernateTypeUtils.logReturn(log, names[0], value);
        return value;
    }

    public void nullSafeSet(PreparedStatement st, Object value, int index) throws HibernateException, SQLException {
        Method keyMethod = getKeyMethod();
        Object key = null;
        try {
            if (value != null) {
                key = keyMethod.invoke(value, new Object[0]);
            }
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("Could not call keyMethod " + keyMethod + " on value " + value, iae);
        } catch (IllegalAccessException e) {
            throw new StudyCalendarSystemException("Cannot access keyMethod " + keyMethod, e);
        } catch (InvocationTargetException e) {
            throw new StudyCalendarSystemException("Invocation of " + keyMethod + " failed", e);
        }
        HibernateTypeUtils.logBind(log, index, key);
        st.setObject(index, key, Types.INTEGER);
    }

    public Object deepCopy(Object value) throws HibernateException {
        // instances are immutable typesafe enums, so no copy is needed
        return value;
    }

    public boolean isMutable() {
        return false;
    }

    public boolean equals(Object x, Object y) throws HibernateException {
        return ComparisonUtils.nullSafeEquals(x, y);
    }

    public int hashCode(Object x) throws HibernateException {
        return x == null ? 0 : x.hashCode();
    }

    public Serializable disassemble(Object value) throws HibernateException {
        return (Serializable) value;
    }

    public Object assemble(Serializable cached, Object owner) throws HibernateException {
        return cached;
    }

    public Object replace(Object original, Object target, Object owner) throws HibernateException {
        return original;
    }
}
|
src/edu/northwestern/bioinformatics/studycalendar/utils/hibernate/ControlledVocabularyObjectType.java
|
package edu.northwestern.bioinformatics.studycalendar.utils.hibernate;
import edu.nwu.bioinformatics.commons.ComparisonUtils;
import org.hibernate.usertype.UserType;
import org.hibernate.usertype.ParameterizedType;
import org.hibernate.HibernateException;
import org.apache.commons.logging.Log;
import java.util.Properties;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.PreparedStatement;
import java.sql.Types;
import java.io.Serializable;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarError;
import edu.northwestern.bioinformatics.studycalendar.StudyCalendarSystemException;
/**
 * A Hibernate UserType for subclasses of {@link edu.northwestern.bioinformatics.studycalendar.domain.AbstractControlledVocabularyObject}.
 * Required parameter:
 * <dl>
 * <dt><code>enumClass</code></dt>
 * <dd>Typesafe enumeration class of which this type instance will load instances</dd>
 * </dl>
 * Optional parameters:
 * <dl>
 * <dt><code>factoryMethod</code></dt>
 * <dd>The public static method to call to obtain an instance of the class from a database key.
 * Default is <kbd>getById</kbd>.</dd>
 * <dt><code>keyMethod</code></dt>
 * <dd>The public method to call on an instance of the class to get the database key under
 * which it should be stored. Default is <kbd>getId</kbd>.</dd>
 * </dl>
 *
 * @author Rhett Sutphin
 */
public class ControlledVocabularyObjectType implements UserType, ParameterizedType {
    protected static final String ENUM_CLASS_PARAM_KEY = "enumClass";
    protected static final String FACTORY_METHOD_PARAM_KEY = "factoryMethod";
    protected static final String KEY_METHOD_PARAM_KEY = "keyMethod";
    private static final String DEFAULT_FACTORY_METHOD_NAME = "getById";
    private static final String DEFAULT_KEY_METHOD_NAME = "getId";
    private static final Class[] NO_PARAMS = new Class[0];
    private Log log = HibernateTypeUtils.getLog(getClass());
    private Properties parameterValues;

    /** Returns the merged (defaults plus user-supplied) type parameters. */
    protected final Properties getParameterValues() {
        return parameterValues;
    }

    ////// IMPLEMENTATION of ParameterizedType

    public void setParameterValues(Properties parameters) {
        this.parameterValues = new Properties(createDefaults());
        if (parameters != null) {
            this.parameterValues.putAll(parameters);
        }
        // call various methods so that they have an opportunity to fail during initialization
        getEnumClass();
        getFactoryMethod();
        getKeyMethod();
    }

    /** Builds the default values for the optional parameters. */
    private Properties createDefaults() {
        Properties defaults = new Properties();
        defaults.put(FACTORY_METHOD_PARAM_KEY, DEFAULT_FACTORY_METHOD_NAME);
        defaults.put(KEY_METHOD_PARAM_KEY, DEFAULT_KEY_METHOD_NAME);
        return defaults;
    }

    ////// IMPLEMENTATION OF UserType

    public final int[] sqlTypes() {
        return new int[] { Types.INTEGER };
    }

    public Class returnedClass() {
        return getEnumClass();
    }

    /** Resolves the configured enumClass, failing loudly if absent or unloadable. */
    private Class getEnumClass() {
        if (getEnumClassName() == null) {
            throw new StudyCalendarError("required enumClass parameter not specified");
        }
        try {
            return Class.forName(getEnumClassName());
        } catch (ClassNotFoundException e) {
            throw new StudyCalendarError("enumClass " + getEnumClassName() + " does not exist", e);
        }
    }

    private String getEnumClassName() {
        return getParameterValues().getProperty(ENUM_CLASS_PARAM_KEY);
    }

    /** The static factory method taking an int key, e.g. {@code getById(int)}. */
    private Method getFactoryMethod() {
        return getParameterNamedMethod(FACTORY_METHOD_PARAM_KEY, new Class[] { Integer.TYPE });
    }

    /** The no-arg instance method yielding the database key, e.g. {@code getId()}. */
    private Method getKeyMethod() {
        return getParameterNamedMethod(KEY_METHOD_PARAM_KEY, NO_PARAMS);
    }

    private Method getParameterNamedMethod(String paramKey, Class[] parameterTypes) {
        String methodName = getParameterValues().getProperty(paramKey);
        try {
            return getEnumClass().getMethod(methodName, parameterTypes);
        } catch (NoSuchMethodException e) {
            throw new StudyCalendarError("enumClass " + getEnumClassName()
                + " has no method named " + methodName, e);
        }
    }

    /**
     * Reads the raw database key for one column. The key is loaded explicitly
     * as an int rather than via {@link ResultSet#getObject}: on Oracle,
     * getObject on a NUMBER column yields a {@link java.math.BigDecimal},
     * which does not match the reflective {@code getById(int)} factory method
     * and fails the invocation. SQL NULL is preserved by consulting
     * {@link ResultSet#wasNull}, since {@code getInt} returns 0 for NULL.
     */
    protected Object getKeyObject(ResultSet rs, String colname) throws SQLException {
        int key = rs.getInt(colname);
        return rs.wasNull() ? null : key;
    }

    public Object nullSafeGet(ResultSet rs, String[] names, Object owner) throws HibernateException, SQLException {
        Object key = getKeyObject(rs, names[0]);
        Object value = null;
        if (key != null) {
            Method factoryMethod = getFactoryMethod();
            try {
                value = factoryMethod.invoke(null, key);
            } catch (IllegalArgumentException iae) {
                throw new StudyCalendarSystemException("Invocation of " + factoryMethod
                    + " with key=" + key + " (" + key.getClass().getName() + ") failed", iae);
            } catch (IllegalAccessException e) {
                throw new StudyCalendarSystemException("Cannot access factoryMethod " + factoryMethod, e);
            } catch (InvocationTargetException e) {
                throw new StudyCalendarSystemException("Invocation of " + factoryMethod + " failed", e);
            }
        }
        HibernateTypeUtils.logReturn(log, names[0], value);
        return value;
    }

    public void nullSafeSet(PreparedStatement st, Object value, int index) throws HibernateException, SQLException {
        Method keyMethod = getKeyMethod();
        Object key = null;
        try {
            if (value != null) {
                key = keyMethod.invoke(value, new Object[0]);
            }
        } catch (IllegalArgumentException iae) {
            throw new IllegalArgumentException("Could not call keyMethod " + keyMethod + " on value " + value, iae);
        } catch (IllegalAccessException e) {
            throw new StudyCalendarSystemException("Cannot access keyMethod " + keyMethod, e);
        } catch (InvocationTargetException e) {
            throw new StudyCalendarSystemException("Invocation of " + keyMethod + " failed", e);
        }
        HibernateTypeUtils.logBind(log, index, key);
        st.setObject(index, key, Types.INTEGER);
    }

    public Object deepCopy(Object value) throws HibernateException {
        // instances are immutable typesafe enums, so no copy is needed
        return value;
    }

    public boolean isMutable() {
        return false;
    }

    public boolean equals(Object x, Object y) throws HibernateException {
        return ComparisonUtils.nullSafeEquals(x, y);
    }

    public int hashCode(Object x) throws HibernateException {
        return x == null ? 0 : x.hashCode();
    }

    public Serializable disassemble(Object value) throws HibernateException {
        return (Serializable) value;
    }

    public Object assemble(Serializable cached, Object owner) throws HibernateException {
        return cached;
    }

    public Object replace(Object original, Object target, Object owner) throws HibernateException {
        return original;
    }
}
|
Explicitly load key as int to work around oracle issue
|
src/edu/northwestern/bioinformatics/studycalendar/utils/hibernate/ControlledVocabularyObjectType.java
|
Explicitly load key as int to work around oracle issue
|
|
Java
|
apache-2.0
|
bbfd28082e855d44cd4a72fa0e0bc4614ee500d0
| 0
|
heiko-braun/wildfly-metrics,heiko-braun/wildfly-metrics,heiko-braun/wildfly-metrics
|
src/test/java/jetbrains/exodus/entitystore/MetricStoreExample.java
|
/**
* Copyright 2010 - 2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jetbrains.exodus.entitystore;
import org.jetbrains.annotations.NotNull;
import java.io.*;
/**
 * Example demonstrating basic Xodus entity-store usage: creates a metric
 * entity, links 10000 samples to it, then reads everything back and prints it.
 */
public class MetricStoreExample {
public static final String METRIC_NAME = "jvm.heap.size";
public static final String SAMPLE_NAME = "measurement";
public static void main(String[] args) {
//Create or open persistent store under directory "data"
final PersistentEntityStoreImpl store = PersistentEntityStores.newInstance("data");
// Create new metric and link samples
final EntityId blogId = store.computeInTransaction(new StoreTransactionalComputable<EntityId>() {
@Override
public EntityId compute(@NotNull StoreTransaction txn) {
final Entity metric = txn.newEntity(METRIC_NAME);
metric.setProperty("name", "JVM heap size in mb");
// NOTE(review): random is drawn once, outside the loop, so all 10000
// samples record the same value — presumably this was meant to be
// inside the loop; confirm intent.
double random = Math.random();
for(int i=0;i<10000; i++)
try {
double probe = random*100;
int heap = (int)probe;
Entity sample = createNewSample(txn, System.currentTimeMillis(), heap);
metric.addLink("samples", sample);
} catch (IOException e) {
throw new RuntimeException(e);
}
return metric.getId();
}
});
// Load blog and show posts and print content
store.executeInTransaction(new StoreTransactionalExecutable() {
@Override
public void execute(@NotNull StoreTransaction txn) {
final EntityIterable metrics = txn.getAll(METRIC_NAME);
for (Entity sample : metrics) {
final Iterable<Entity> blogItems = sample.getLinks("samples");
for (Entity item : blogItems) {
try {
printSample(item);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
}
});
// Close store when we are done
store.close();
}
/** Prints one sample's timestamp and value to stdout. */
private static void printSample(Entity item) throws IOException {
System.out.println("\tTimestamp: " + item.getProperty("timestamp"));
System.out.println("\tValue: " + item.getProperty("value"));
}
/**
 * Creates a new sample entity with the given timestamp and heap value.
 * NOTE(review): the declared IOException is never actually thrown here,
 * so the try/catch blocks around the call sites are superfluous.
 */
private static Entity createNewSample(StoreTransaction txn, long timestamp, int heap) throws IOException {
final Entity sample = txn.newEntity(SAMPLE_NAME);
sample.setProperty("timestamp", timestamp);
sample.setProperty("value", heap);
return sample;
}
}
|
delete store example
|
src/test/java/jetbrains/exodus/entitystore/MetricStoreExample.java
|
delete store example
|
||
Java
|
mit
|
5fa3b1795685133cfc3164e509ccd11577265d4d
| 0
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
package com.semmle.js.extractor;
import java.io.File;
import java.nio.file.Path;
import java.util.regex.Pattern;
import com.semmle.extractor.html.HtmlPopulator;
import com.semmle.js.extractor.ExtractorConfig.Platform;
import com.semmle.js.extractor.ExtractorConfig.SourceType;
import com.semmle.js.parser.ParseError;
import com.semmle.util.data.Option;
import com.semmle.util.data.StringUtil;
import com.semmle.util.io.WholeIO;
import com.semmle.util.trap.TrapWriter;
import net.htmlparser.jericho.Attribute;
import net.htmlparser.jericho.Attributes;
import net.htmlparser.jericho.Element;
import net.htmlparser.jericho.HTMLElementName;
import net.htmlparser.jericho.RowColumnVector;
import net.htmlparser.jericho.Segment;
/** Extractor for handling HTML and XHTML files. */
public class HTMLExtractor implements IExtractor {
/**
 * Jericho element handler that extracts JavaScript embedded in HTML:
 * inline script elements, event-handler attributes, and javascript: URLs.
 */
private class JavaScriptHTMLElementHandler implements HtmlPopulator.ElementHandler {
private final ScopeManager scopeManager;
private final TextualExtractor textualExtractor;
// running lines-of-code tally across all snippets in this document
private LoCInfo locInfo;
public JavaScriptHTMLElementHandler(TextualExtractor textualExtractor) {
this.textualExtractor = textualExtractor;
this.locInfo = new LoCInfo(0, 0);
this.scopeManager =
new ScopeManager(textualExtractor.getTrapwriter(), config.getEcmaVersion());
}
/*
* Extract all JavaScript snippets appearing in (in-line) script elements and as
* attribute values.
*/
@Override
public void handleElement(Element elt) {
LoCInfo snippetLoC = null;
if (elt.getName().equals(HTMLElementName.SCRIPT)) {
// a null source type means the script element is not JavaScript/TypeScript
SourceType sourceType = getScriptSourceType(elt, textualExtractor.getExtractedFile());
if (sourceType != null) {
// Jericho sometimes misparses empty elements, which will show up as start tags
// ending in "/"; we manually exclude these cases to avoid spurious syntax
// errors
if (elt.getStartTag().getTagContent().toString().trim().endsWith("/")) return;
Segment content = elt.getContent();
String source = content.toString();
boolean isTypeScript = isTypeScriptTag(elt);
/*
* Script blocks in XHTML files may wrap (parts of) their code inside CDATA
* sections. We need to unwrap them in order not to confuse the JavaScript
* parser.
*
* Note that CDATA sections do not nest, so they can be detected by a regular
* expression.
*
* In order to preserve position information, we replace the CDATA section
* markers with an equivalent number of whitespace characters. This will yield
* surprising results for CDATA sections inside string literals, but those are
* likely to be rare.
*/
source = source.replace("<![CDATA[", " ").replace("]]>", " ");
if (!source.trim().isEmpty()) {
// anchor the snippet at the script content's position in the HTML file
RowColumnVector contentStart = content.getRowColumnVector();
snippetLoC =
extractSnippet(
1,
config.withSourceType(sourceType),
scopeManager,
textualExtractor,
source,
contentStart.getRow(),
contentStart.getColumn(),
isTypeScript);
}
}
} else {
Attributes attributes = elt.getAttributes();
// attributes can be null for directives
if (attributes != null)
for (Attribute attr : attributes) {
// ignore empty attributes
if (attr.getValue() == null || attr.getValue().isEmpty()) continue;
String source = attr.getValue();
RowColumnVector valueStart = attr.getValueSegment().getRowColumnVector();
if (JS_ATTRIBUTE.matcher(attr.getName()).matches()) {
// event-handler attribute (e.g. onclick): value is JavaScript code
snippetLoC =
extractSnippet(
2,
config,
scopeManager,
textualExtractor,
source,
valueStart.getRow(),
valueStart.getColumn(),
false /* isTypeScript */);
} else if (source.startsWith("javascript:")) {
// javascript: URL; strip the 11-character scheme prefix and shift
// the reported start column by the same amount
source = source.substring(11);
snippetLoC =
extractSnippet(
3,
config,
scopeManager,
textualExtractor,
source,
valueStart.getRow(),
valueStart.getColumn() + 11,
false /* isTypeScript */);
}
}
}
if (snippetLoC != null) locInfo.add(snippetLoC);
}
/** Returns the accumulated lines-of-code info for all extracted snippets. */
public LoCInfo getLoCInfo() {
return this.locInfo;
}
}
/** List of HTML attributes whose value is interpreted as JavaScript. */
private static final Pattern JS_ATTRIBUTE =
Pattern.compile(
"^on(abort|blur|change|(dbl)?click|error|focus|key(down|press|up)|load|mouse(down|move|out|over|up)|re(set|size)|select|submit|unload)$",
Pattern.CASE_INSENSITIVE);
private final ExtractorConfig config;
private final ExtractorState state;
public HTMLExtractor(ExtractorConfig config, ExtractorState state) {
this.config = config.withPlatform(Platform.WEB);
this.state = state;
}
@Override
public LoCInfo extract(TextualExtractor textualExtractor) {
// Angular templates contain attribute names that are not valid HTML/XML, such as [foo], (foo), [(foo)], and *foo.
// Allow a large number of errors in attribute names, so the Jericho parser does not give up.
Attributes.setDefaultMaxErrorCount(100);
JavaScriptHTMLElementHandler eltHandler = new JavaScriptHTMLElementHandler(textualExtractor);
HtmlPopulator extractor =
new HtmlPopulator(
this.config.getHtmlHandling(),
textualExtractor.getSource(),
textualExtractor.getTrapwriter(),
textualExtractor.getLocationManager().getFileLabel());
extractor.doit(Option.some(eltHandler));
return eltHandler.getLoCInfo();
}
/**
* Deduce the {@link SourceType} with which the given <code>script</code> element should be
* extracted, returning <code>null</code> if it cannot be determined.
*/
private SourceType getScriptSourceType(Element script, File file) {
String scriptType = getAttributeValueLC(script, "type");
String scriptLanguage = getScriptLanguage(script);
SourceType fallbackSourceType = config.getSourceType();
if (file.getName().endsWith(".vue")) {
fallbackSourceType = SourceType.MODULE;
}
if (isTypeScriptTag(script)) return fallbackSourceType;
// if `type` and `language` are both either missing, contain the
// string "javascript", or if `type` is the string "text/jsx", this is a plain
// script
if ((scriptType == null || scriptType.contains("javascript") || "text/jsx".equals(scriptType))
&& (scriptLanguage == null || scriptLanguage.contains("javascript")))
// use default source type
return fallbackSourceType;
// if `type` is "text/babel", the source type depends on the `data-plugins`
// attribute
if ("text/babel".equals(scriptType)) {
String plugins = getAttributeValueLC(script, "data-plugins");
if (plugins != null && plugins.contains("transform-es2015-modules-umd")) {
return SourceType.MODULE;
}
return fallbackSourceType;
}
// if `type` is "module", extract as module
if ("module".equals(scriptType)) return SourceType.MODULE;
return null;
}
private String getScriptLanguage(Element script) {
String scriptLanguage = getAttributeValueLC(script, "language");
if (scriptLanguage == null) { // Vue templates use 'lang' instead of 'language'.
scriptLanguage = getAttributeValueLC(script, "lang");
}
return scriptLanguage;
}
private boolean isTypeScriptTag(Element script) {
String language = getScriptLanguage(script);
if ("ts".equals(language) || "typescript".equals(language)) return true;
String type = getAttributeValueLC(script, "type");
if (type != null && type.contains("typescript")) return true;
return false;
}
/**
* Get the value of attribute <code>attr</code> of element <code>elt</code> in lower case; if the
* attribute has no value, <code>null</code> is returned.
*/
private String getAttributeValueLC(Element elt, String attr) {
String val = elt.getAttributeValue(attr);
return val == null ? val : StringUtil.lc(val);
}
private LoCInfo extractSnippet(
int toplevelKind,
ExtractorConfig config,
ScopeManager scopeManager,
TextualExtractor textualExtractor,
String source,
int line,
int column,
boolean isTypeScript) {
if (isTypeScript) {
Path file = textualExtractor.getExtractedFile().toPath();
FileSnippet snippet =
new FileSnippet(file, line, column, toplevelKind, config.getSourceType());
VirtualSourceRoot vroot = config.getVirtualSourceRoot();
// Vue files are special in that they can be imported as modules, and may only
// contain one <script> tag.
// For .vue files we omit the usual snippet decoration to ensure the TypeScript
// compiler can find it.
Path virtualFile =
file.getFileName().toString().endsWith(".vue")
? vroot.toVirtualFile(file.resolveSibling(file.getFileName() + ".ts"))
: vroot.getVirtualFileForSnippet(snippet, ".ts");
if (virtualFile != null) {
virtualFile = virtualFile.toAbsolutePath().normalize();
synchronized (vroot.getLock()) {
new WholeIO().strictwrite(virtualFile, source);
}
state.getSnippets().put(virtualFile, snippet);
}
return null; // LoC info is accounted for later
}
TrapWriter trapwriter = textualExtractor.getTrapwriter();
LocationManager locationManager = textualExtractor.getLocationManager();
LocationManager scriptLocationManager =
new LocationManager(
locationManager.getSourceFile(), trapwriter, locationManager.getFileLabel());
scriptLocationManager.setStart(line, column);
JSExtractor extractor = new JSExtractor(config);
try {
TextualExtractor tx =
new TextualExtractor(
trapwriter,
scriptLocationManager,
source,
config.getExtractLines(),
textualExtractor.getMetrics(),
textualExtractor.getExtractedFile());
return extractor.extract(tx, source, toplevelKind, scopeManager).snd();
} catch (ParseError e) {
e.setPosition(scriptLocationManager.translatePosition(e.getPosition()));
throw e.asUserError();
}
}
}
|
javascript/extractor/src/com/semmle/js/extractor/HTMLExtractor.java
|
package com.semmle.js.extractor;
import java.io.File;
import java.nio.file.Path;
import java.util.regex.Pattern;
import com.semmle.extractor.html.HtmlPopulator;
import com.semmle.js.extractor.ExtractorConfig.Platform;
import com.semmle.js.extractor.ExtractorConfig.SourceType;
import com.semmle.js.parser.ParseError;
import com.semmle.util.data.Option;
import com.semmle.util.data.StringUtil;
import com.semmle.util.io.WholeIO;
import com.semmle.util.trap.TrapWriter;
import net.htmlparser.jericho.Attribute;
import net.htmlparser.jericho.Attributes;
import net.htmlparser.jericho.Element;
import net.htmlparser.jericho.HTMLElementName;
import net.htmlparser.jericho.RowColumnVector;
import net.htmlparser.jericho.Segment;
/** Extractor for handling HTML and XHTML files. */
public class HTMLExtractor implements IExtractor {

  /**
   * Visits each element of an HTML/XHTML document and extracts every JavaScript
   * snippet found in inline script elements, event-handler attributes, and
   * {@code javascript:} URLs, accumulating lines-of-code statistics.
   */
  private class JavaScriptHTMLElementHandler implements HtmlPopulator.ElementHandler {
    private final ScopeManager scopeManager;
    private final TextualExtractor textualExtractor;
    // Running LoC/comment totals over all snippets extracted from this file.
    private LoCInfo locInfo;

    public JavaScriptHTMLElementHandler(TextualExtractor textualExtractor) {
      this.textualExtractor = textualExtractor;
      this.locInfo = new LoCInfo(0, 0);
      this.scopeManager =
          new ScopeManager(textualExtractor.getTrapwriter(), config.getEcmaVersion());
    }

    /*
     * Extract all JavaScript snippets appearing in (in-line) script elements and as
     * attribute values.
     */
    @Override
    public void handleElement(Element elt) {
      LoCInfo snippetLoC = null;
      if (elt.getName().equals(HTMLElementName.SCRIPT)) {
        SourceType sourceType = getScriptSourceType(elt, textualExtractor.getExtractedFile());
        if (sourceType != null) {
          // Jericho sometimes misparses empty elements, which will show up as start tags
          // ending in "/"; we manually exclude these cases to avoid spurious syntax
          // errors
          if (elt.getStartTag().getTagContent().toString().trim().endsWith("/")) return;
          Segment content = elt.getContent();
          String source = content.toString();
          boolean isTypeScript = isTypeScriptTag(elt);
          /*
           * Script blocks in XHTML files may wrap (parts of) their code inside CDATA
           * sections. We need to unwrap them in order not to confuse the JavaScript
           * parser.
           *
           * Note that CDATA sections do not nest, so they can be detected by a regular
           * expression.
           *
           * In order to preserve position information, we replace the CDATA section
           * markers with an equivalent number of whitespace characters. This will yield
           * surprising results for CDATA sections inside string literals, but those are
           * likely to be rare.
           */
          source = source.replace("<![CDATA[", " ").replace("]]>", " ");
          if (!source.trim().isEmpty()) {
            RowColumnVector contentStart = content.getRowColumnVector();
            // toplevelKind 1 = inline script element.
            snippetLoC =
                extractSnippet(
                    1,
                    config.withSourceType(sourceType),
                    scopeManager,
                    textualExtractor,
                    source,
                    contentStart.getRow(),
                    contentStart.getColumn(),
                    isTypeScript);
          }
        }
      } else {
        Attributes attributes = elt.getAttributes();
        // attributes can be null for directives
        if (attributes != null)
          for (Attribute attr : attributes) {
            // ignore empty attributes
            if (attr.getValue() == null || attr.getValue().isEmpty()) continue;
            String source = attr.getValue();
            RowColumnVector valueStart = attr.getValueSegment().getRowColumnVector();
            if (JS_ATTRIBUTE.matcher(attr.getName()).matches()) {
              // Event-handler attribute (e.g. onclick): value is JavaScript.
              // toplevelKind 2 = event-handler attribute.
              snippetLoC =
                  extractSnippet(
                      2,
                      config,
                      scopeManager,
                      textualExtractor,
                      source,
                      valueStart.getRow(),
                      valueStart.getColumn(),
                      false /* isTypeScript */);
            } else if (source.startsWith("javascript:")) {
              // javascript: URL; code starts after the 11-character scheme prefix,
              // so the start column is shifted accordingly.
              // toplevelKind 3 = javascript: URL.
              source = source.substring(11);
              snippetLoC =
                  extractSnippet(
                      3,
                      config,
                      scopeManager,
                      textualExtractor,
                      source,
                      valueStart.getRow(),
                      valueStart.getColumn() + 11,
                      false /* isTypeScript */);
            }
          }
      }
      // NOTE(review): only the most recently extracted snippet's LoC is added here;
      // if one element carries several JavaScript-bearing attributes, earlier
      // snippets' LoC counts are dropped — confirm whether this is intended.
      // extractSnippet returns null for TypeScript snippets (accounted for later).
      if (snippetLoC != null) locInfo.add(snippetLoC);
    }

    /** Returns the accumulated LoC information for all snippets seen so far. */
    public LoCInfo getLoCInfo() {
      return this.locInfo;
    }
  }

  /** List of HTML attributes whose value is interpreted as JavaScript. */
  private static final Pattern JS_ATTRIBUTE =
      Pattern.compile(
          "^on(abort|blur|change|(dbl)?click|error|focus|key(down|press|up)|load|mouse(down|move|out|over|up)|re(set|size)|select|submit|unload)$",
          Pattern.CASE_INSENSITIVE);

  private final ExtractorConfig config;
  private final ExtractorState state;

  public HTMLExtractor(ExtractorConfig config, ExtractorState state) {
    // HTML embedding implies browser code, so force the WEB platform.
    this.config = config.withPlatform(Platform.WEB);
    this.state = state;
  }

  @Override
  public LoCInfo extract(TextualExtractor textualExtractor) {
    // Walk the HTML document, extracting embedded JavaScript as we go, and
    // return the combined LoC info of all snippets.
    JavaScriptHTMLElementHandler eltHandler = new JavaScriptHTMLElementHandler(textualExtractor);
    HtmlPopulator extractor =
        new HtmlPopulator(
            this.config.getHtmlHandling(),
            textualExtractor.getSource(),
            textualExtractor.getTrapwriter(),
            textualExtractor.getLocationManager().getFileLabel());
    extractor.doit(Option.some(eltHandler));
    return eltHandler.getLoCInfo();
  }

  /**
   * Deduce the {@link SourceType} with which the given <code>script</code> element should be
   * extracted, returning <code>null</code> if it cannot be determined.
   */
  private SourceType getScriptSourceType(Element script, File file) {
    String scriptType = getAttributeValueLC(script, "type");
    String scriptLanguage = getScriptLanguage(script);
    SourceType fallbackSourceType = config.getSourceType();
    if (file.getName().endsWith(".vue")) {
      // Vue single-file components are importable as modules.
      fallbackSourceType = SourceType.MODULE;
    }
    if (isTypeScriptTag(script)) return fallbackSourceType;
    // if `type` and `language` are both either missing, contain the
    // string "javascript", or if `type` is the string "text/jsx", this is a plain
    // script
    if ((scriptType == null || scriptType.contains("javascript") || "text/jsx".equals(scriptType))
        && (scriptLanguage == null || scriptLanguage.contains("javascript")))
      // use default source type
      return fallbackSourceType;
    // if `type` is "text/babel", the source type depends on the `data-plugins`
    // attribute
    if ("text/babel".equals(scriptType)) {
      String plugins = getAttributeValueLC(script, "data-plugins");
      if (plugins != null && plugins.contains("transform-es2015-modules-umd")) {
        return SourceType.MODULE;
      }
      return fallbackSourceType;
    }
    // if `type` is "module", extract as module
    if ("module".equals(scriptType)) return SourceType.MODULE;
    // Unrecognized script type (e.g. templates): do not extract.
    return null;
  }

  /**
   * Returns the lower-cased scripting language of the given element, consulting the
   * `language` attribute first and falling back to `lang` (used by Vue templates).
   */
  private String getScriptLanguage(Element script) {
    String scriptLanguage = getAttributeValueLC(script, "language");
    if (scriptLanguage == null) { // Vue templates use 'lang' instead of 'language'.
      scriptLanguage = getAttributeValueLC(script, "lang");
    }
    return scriptLanguage;
  }

  /** Holds if the given script element declares itself to contain TypeScript. */
  private boolean isTypeScriptTag(Element script) {
    String language = getScriptLanguage(script);
    if ("ts".equals(language) || "typescript".equals(language)) return true;
    String type = getAttributeValueLC(script, "type");
    if (type != null && type.contains("typescript")) return true;
    return false;
  }

  /**
   * Get the value of attribute <code>attr</code> of element <code>elt</code> in lower case; if the
   * attribute has no value, <code>null</code> is returned.
   */
  private String getAttributeValueLC(Element elt, String attr) {
    String val = elt.getAttributeValue(attr);
    return val == null ? val : StringUtil.lc(val);
  }

  /**
   * Extracts one JavaScript/TypeScript snippet located at the given line/column of
   * the enclosing HTML file. TypeScript snippets are written out to a virtual
   * source file for later processing and yield {@code null}; JavaScript snippets
   * are parsed immediately and their LoC info is returned.
   */
  private LoCInfo extractSnippet(
      int toplevelKind,
      ExtractorConfig config,
      ScopeManager scopeManager,
      TextualExtractor textualExtractor,
      String source,
      int line,
      int column,
      boolean isTypeScript) {
    if (isTypeScript) {
      Path file = textualExtractor.getExtractedFile().toPath();
      FileSnippet snippet =
          new FileSnippet(file, line, column, toplevelKind, config.getSourceType());
      VirtualSourceRoot vroot = config.getVirtualSourceRoot();
      // Vue files are special in that they can be imported as modules, and may only
      // contain one <script> tag.
      // For .vue files we omit the usual snippet decoration to ensure the TypeScript
      // compiler can find it.
      Path virtualFile =
          file.getFileName().toString().endsWith(".vue")
              ? vroot.toVirtualFile(file.resolveSibling(file.getFileName() + ".ts"))
              : vroot.getVirtualFileForSnippet(snippet, ".ts");
      if (virtualFile != null) {
        virtualFile = virtualFile.toAbsolutePath().normalize();
        // Writes to the virtual source root may race with other extractor threads.
        synchronized (vroot.getLock()) {
          new WholeIO().strictwrite(virtualFile, source);
        }
        state.getSnippets().put(virtualFile, snippet);
      }
      return null; // LoC info is accounted for later
    }
    TrapWriter trapwriter = textualExtractor.getTrapwriter();
    LocationManager locationManager = textualExtractor.getLocationManager();
    // Offset all locations by the snippet's position within the HTML file.
    LocationManager scriptLocationManager =
        new LocationManager(
            locationManager.getSourceFile(), trapwriter, locationManager.getFileLabel());
    scriptLocationManager.setStart(line, column);
    JSExtractor extractor = new JSExtractor(config);
    try {
      TextualExtractor tx =
          new TextualExtractor(
              trapwriter,
              scriptLocationManager,
              source,
              config.getExtractLines(),
              textualExtractor.getMetrics(),
              textualExtractor.getExtractedFile());
      return extractor.extract(tx, source, toplevelKind, scopeManager).snd();
    } catch (ParseError e) {
      // Translate the parse-error position back into HTML-file coordinates.
      e.setPosition(scriptLocationManager.translatePosition(e.getPosition()));
      throw e.asUserError();
    }
  }
}
|
JS: Tolerate Angular-specific HTML attribute names
|
javascript/extractor/src/com/semmle/js/extractor/HTMLExtractor.java
|
JS: Tolerate Angular-specific HTML attribute names
|
|
Java
|
mit
|
fb1c3b4ce88a993350135a39d1be9af8868c9c24
| 0
|
dotdog20/SergBoat
|
/*
* MIT License
*
* Copyright (c) 2016 Frederik Ar. Mikkelsen
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package fredboat.command.music.control;
import fredboat.FredBoat;
import fredboat.audio.GuildPlayer;
import fredboat.audio.PlayerRegistry;
import fredboat.commandmeta.MessagingException;
import fredboat.commandmeta.abs.Command;
import fredboat.commandmeta.abs.IMusicCommand;
import fredboat.util.RestActionScheduler;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.TextChannel;
import java.util.concurrent.TimeUnit;
public class VolumeCommand extends Command implements IMusicCommand {

    @Override
    public void onInvoke(Guild guild, TextChannel channel, Member invoker, Message message, String[] args) {
        if (!FredBoat.distribution.volumeSupported()) {
            // Volume control is disabled on this distribution (the public music bot):
            // explain why, and schedule the notice to delete itself after two minutes.
            channel.sendMessage("Sorry! The ;;volume command has now been deprecated on the public music bot. "
                    + "This is because of how it causes the bot to spend a lot more time processing audio, some tracks up to 5 times more, causing everyone to hear stutter. "
                    + "By disabling this feature FredBoat can play much more music without lag.\n"
                    + "I recommend setting the bot's volume via the dropdown menu https://fred.moe/1vD.png").queue(message1 -> RestActionScheduler.schedule(
                    message1.deleteMessage(),
                    2,
                    TimeUnit.MINUTES
            ));
            return;
        }
        GuildPlayer player = PlayerRegistry.get(guild);
        try {
            // Interpret the first argument as a percentage, then clamp to [0%, 150%].
            float newVolume = Float.parseFloat(args[1]) / 100;
            newVolume = Math.max(0, Math.min(1.5f, newVolume));
            // Announce the change before applying it so the "from" value reflects the old volume.
            channel.sendMessage("Changed volume from **" + (int) Math.floor(player.getVolume() * 100) + "%** to **" + (int) Math.floor(newVolume * 100) + "%**.").queue();
            player.setVolume(newVolume);
        } catch (NumberFormatException | ArrayIndexOutOfBoundsException ex) {
            // Missing or non-numeric argument: reply with usage, default, and current volume.
            throw new MessagingException("Use `;;volume <0-150>`. " + (int) (100 * PlayerRegistry.DEFAULT_VOLUME) + "% is the default.\nThe player is currently at **" + (int) Math.floor(player.getVolume() * 100) + "%**.");
        }
    }
}
|
FredBoat/src/main/java/fredboat/command/music/control/VolumeCommand.java
|
/*
* MIT License
*
* Copyright (c) 2016 Frederik Ar. Mikkelsen
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package fredboat.command.music.control;
import fredboat.FredBoat;
import fredboat.audio.GuildPlayer;
import fredboat.audio.PlayerRegistry;
import fredboat.commandmeta.MessagingException;
import fredboat.commandmeta.abs.Command;
import fredboat.commandmeta.abs.IMusicCommand;
import fredboat.util.RestActionScheduler;
import net.dv8tion.jda.core.entities.Guild;
import net.dv8tion.jda.core.entities.Member;
import net.dv8tion.jda.core.entities.Message;
import net.dv8tion.jda.core.entities.TextChannel;
import java.util.concurrent.TimeUnit;
public class VolumeCommand extends Command implements IMusicCommand {

    @Override
    public void onInvoke(Guild guild, TextChannel channel, Member invoker, Message message, String[] args) {
        if(FredBoat.distribution.volumeSupported()) {
            GuildPlayer player = PlayerRegistry.get(guild);
            try {
                // Interpret the first argument as a percentage, then clamp to [0%, 200%].
                float volume = Float.parseFloat(args[1]) / 100;
                volume = Math.max(0, Math.min(2.0f, volume));
                // Announce before applying so the "from" value reflects the old volume.
                channel.sendMessage("Changed volume from **" + (int) Math.floor(player.getVolume() * 100) + "%** to **" + (int) Math.floor(volume * 100) + "%**.").queue();
                player.setVolume(volume);
            } catch (NumberFormatException | ArrayIndexOutOfBoundsException ex) {
                // Fix: the usage string previously said `<0-150>` while the clamp above
                // allows up to 2.0 (200%); the message now matches the actual range.
                throw new MessagingException("Use `;;volume <0-200>`. " + (int) (100 * PlayerRegistry.DEFAULT_VOLUME) + "% is the default.\nThe player is currently at **" + (int) Math.floor(player.getVolume() * 100) + "%**.");
            }
        } else {
            // Volume control disabled on the public bot; explain why and self-delete
            // the notice after two minutes.
            channel.sendMessage("Sorry! The ;;volume command has now been deprecated on the public music bot. "
                + "This is because of how it causes the bot to spend a lot more time processing audio, some tracks up to 5 times more, causing everyone to hear stutter. "
                + "By disabling this feature FredBoat can play much more music without lag.\n"
                + "I recommend setting the bot's volume via the dropdown menu https://fred.moe/1vD.png").queue(message1 -> RestActionScheduler.schedule(
                    message1.deleteMessage(),
                    2,
                    TimeUnit.MINUTES
            ));
        }
    }
}
|
Revert "Change max vol from 1.5 to 2.0"
This reverts commit 5fece2a683167e6a2c26ae79c63862440d67196f.
|
FredBoat/src/main/java/fredboat/command/music/control/VolumeCommand.java
|
Revert "Change max vol from 1.5 to 2.0"
|
|
Java
|
mit
|
bbea25b0c785a0301b501fcccaa1ef8e5c6b04b4
| 0
|
intuit/karate,intuit/karate,intuit/karate,intuit/karate
|
package com.intuit.karate;
import com.intuit.karate.core.MatchType;
import com.intuit.karate.core.FeatureContext;
import com.intuit.karate.core.ScenarioContext;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.InvalidJsonException;
import com.jayway.jsonpath.JsonPath;
import java.io.ByteArrayInputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
import org.w3c.dom.Document;
/**
*
* @author pthomas3
*/
public class ScriptTest {
private static final Logger logger = LoggerFactory.getLogger(ScriptTest.class);
private ScenarioContext getContext() {
Path featureDir = FileUtils.getPathContaining(getClass());
FeatureContext featureContext = FeatureContext.forWorkingDir("dev", featureDir.toFile());
CallContext callContext = new CallContext(null, true);
return new ScenarioContext(featureContext, callContext, null, null);
}
private AssertionResult matchJsonObject(Object act, Object exp, ScenarioContext context) {
return Script.matchNestedObject('.', "$", MatchType.EQUALS, null, null, act, exp, context);
}
@Test
public void testParsingTextType() {
assertTrue(Script.isVariable("foo"));
assertTrue(Script.isXmlPath("/foo"));
assertTrue(Script.isXmlPath("//foo"));
assertTrue(Script.isXmlPathFunction("lower-case('Foo')"));
assertTrue(Script.isXmlPathFunction("count(/journal/article)"));
assertTrue(Script.isVariableAndSpaceAndPath("foo count(/journal/article)"));
assertTrue(Script.isVariableAndSpaceAndPath("foo $"));
}
@Test
public void testEvalPrimitives() {
ScenarioContext ctx = getContext();
ctx.vars.put("foo", "bar");
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
String expression = "foo + 'baz'";
ScriptValue value = Script.evalJsExpression(expression, ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("barbaz", value.getValue());
value = Script.evalJsExpression("a + b", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
@Test
public void testMatchPrimitiveStrings() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", "3");
ctx.vars.put("b", 3);
assertFalse(Script.matchNamed(MatchType.EQUALS, "a", null, "b", ctx).pass);
}
@Test
public void testEvalMapsAndLists() {
ScenarioContext ctx = getContext();
Map<String, Object> testMap = new HashMap<>();
testMap.put("foo", "bar");
testMap.put("baz", 5);
List<Integer> testList = new ArrayList<>();
testList.add(1);
testList.add(2);
testMap.put("myList", testList);
ctx.vars.put("myMap", testMap);
String expression = "myMap.foo + myMap.baz";
ScriptValue value = Script.evalJsExpression(expression, ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("bar5", value.getValue());
value = Script.evalJsExpression("myMap.myList[0] + myMap.myList[1]", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
@Test
public void testEvalJsonDocuments() {
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar', baz: [1, 2], ban: { hello: 'world' } }");
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsExpression("myJson.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalJsExpression("myJson.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalJsExpression("myJson.ban.hello", ctx);
assertEquals("world", value.getValue());
}
@Test
public void testEvalXmlDocuments() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<root><foo>bar</foo><hello>world</hello></root>");
ctx.vars.put("myXml", doc);
ScriptValue value = Script.evalJsExpression("myXml.root.foo", ctx);
assertEquals("bar", value.getValue());
}
@Test
public void testAssignXmlWithLineBreaksAndMatchJson() {
ScenarioContext ctx = getContext();
Script.assign("foo", "<records>\n <record>a</record>\n <record>b</record>\n <record>c</record>\n</records>", ctx);
Script.assign("bar", "foo.records", ctx);
ScriptValue value = ctx.vars.get("bar");
assertTrue(value.getType() == ScriptValue.Type.MAP);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar.record", null, "['a', 'b', 'c']", ctx).pass);
assertTrue(Script.assertBoolean("foo.records.record.length == 3", ctx).pass);
}
@Test
public void testAssignXmlWithLineBreaksAndNullElements() {
ScenarioContext ctx = getContext();
Script.assign("foo", "<records>\n <record>a</record>\n <record/>\n</records>", ctx);
Script.assign("bar", "foo.records", ctx);
ScriptValue value = ctx.vars.get("bar");
assertTrue(value.getType() == ScriptValue.Type.MAP);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar.record", null, "['a', null]", ctx).pass);
}
@Test
public void testJsonPathOnVarsByName() {
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar', baz: [1, 2], ban: { hello: 'world' } }");
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsonPathOnVarByName("myJson", "$.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalKarateExpression("myJson.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalJsonPathOnVarByName("myJson", "$.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalKarateExpression("myJson.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalJsonPathOnVarByName("myJson", "$.baz", ctx);
assertEquals(ScriptValue.Type.LIST, value.getType());
value = Script.evalJsonPathOnVarByName("myJson", "$.ban", ctx);
assertEquals(ScriptValue.Type.MAP, value.getType());
}
@Test
public void testXmlPathOnVarsByName() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<root><foo>bar</foo></root>");
ctx.vars.put("myXml", doc);
ScriptValue value = Script.evalXmlPathOnVarByName("myXml", "/root/foo", ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("bar", value.getAsString());
value = Script.evalKarateExpression("$myXml/root/foo", ctx);
assertEquals("bar", value.getAsString());
}
@Test
public void testEvalXmlEmbeddedExpressions() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
Document doc = XmlUtils.toXmlDoc("<root><foo>#(a + b)</foo></root>");
Script.evalXmlEmbeddedExpressions(doc, ctx);
ctx.vars.put("myXml", doc);
ScriptValue value = Script.evalXmlPathOnVarByName("myXml", "/root/foo", ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("3.0", value.getAsString());
}
@Test
public void testEvalXmlEmbeddedExpressionsThatReturnChunks() {
ScenarioContext ctx = getContext();
Script.assign("hello", "<hello>world</hello>", ctx);
Script.assign("xml", "<foo><bar>#(hello)</bar></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar><hello>world</hello></bar></foo>", ctx).pass);
}
@Test
public void testEvalXmlEmbeddedExpressionsThatReturnNull() {
ScenarioContext ctx = getContext();
Script.assign("hello", "null", ctx);
Script.assign("xml", "<foo><bar>#(hello)</bar></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar></bar></foo>", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar/></foo>", ctx).pass);
}
@Test
public void testEvalXmlEmbeddedExpressionsInAttributes() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 5);
String xml = "<foo bar=\"#(a)\">#(a)</foo>";
Document doc = XmlUtils.toXmlDoc(xml);
Script.evalXmlEmbeddedExpressions(doc, ctx);
String result = XmlUtils.toString(doc);
logger.debug("result: {}", result);
assertTrue(result.endsWith("<foo bar=\"5\">5</foo>"));
}
@Test
public void testEvalXmlEmbeddedOptionalExpressionsInAttributes() {
ScenarioContext ctx = getContext();
Script.assign("a", "null", ctx);
Script.assign("xml", "<foo bar=\"##(a)\">baz</foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo>baz</foo>", ctx).pass);
}
@Test
public void testEvalXmlEmbeddedOptionalExpressions() {
ScenarioContext ctx = getContext();
Script.assign("a", "null", ctx);
Script.assign("xml", "<foo><a>hello</a><b>##(a)</b></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><a>hello</a></foo>", ctx).pass);
}
@Test
public void testEvalJsonEmbeddedExpressions() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: '#(a + b)' }");
Script.evalJsonEmbeddedExpressions(doc, ctx);
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsonPathOnVarByName("myJson", "$.foo", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
@Test
public void testEvalEmbeddedExpressionsWithJsonPath() {
ScenarioContext ctx = getContext();
String ticket = "{ ticket: 'my-ticket', userId: '12345' }";
ctx.vars.put("ticket", JsonUtils.toJsonDoc(ticket));
String json = "{ foo: '#(ticket.userId)' }";
DocumentContext doc = JsonUtils.toJsonDoc(json);
Script.evalJsonEmbeddedExpressions(doc, ctx);
String result = doc.jsonString();
logger.debug("result: {}", result);
assertEquals("{\"foo\":\"12345\"}", result);
}
@Test
// Embedded expressions work even when the enclosing keys contain spaces, hyphens and dots.
public void testEvalEmbeddedExpressionsWithJsonPathsWhichAreTricky() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ a: 1, b: 2, c: 3 }", ctx);
Script.assign("bar", "{ 'sp ace': '#(foo.a)', 'hy-phen': '#(foo.b)', 'full.stop': '#(foo.c)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar", null, "{ 'sp ace': 1, 'hy-phen': 2, 'full.stop': 3 }", ctx).pass);
}
@Test
// '#(...)' keeps a null-valued key, while optional '##(...)' removes the key entirely.
public void testEvalEmbeddedOptionalExpressions() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ a: null, b: null }", ctx);
Script.assign("bar", "{ hello: '#(foo.a)', world: '##(foo.b)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar", null, "{ hello: null }", ctx).pass);
}
@Test
// An embedded expression referencing an InputStream var is converted to its string content.
public void testEvalEmbeddedExpressionStream() {
ScenarioContext ctx = getContext();
ctx.vars.put("inputStream", new ScriptValue(new ByteArrayInputStream("hello world".getBytes())));
Script.assign("doc", "{ foo: '#(inputStream)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "doc", null, "{ foo: 'hello world' }", ctx).pass);
}
@Test
// Valid variable names: start with a letter, then letters / digits / underscores only.
public void testVariableNameValidation() {
assertTrue(Script.isValidVariableName("foo"));
assertTrue(Script.isValidVariableName("foo_bar"));
assertTrue(Script.isValidVariableName("foo_"));
assertTrue(Script.isValidVariableName("foo1"));
assertTrue(Script.isValidVariableName("a"));
assertTrue(Script.isValidVariableName("a1"));
// bad: dots, hyphens, '$', '/', leading underscore or leading digit are all rejected
assertFalse(Script.isValidVariableName("foo.bar"));
assertFalse(Script.isValidVariableName("foo-bar"));
assertFalse(Script.isValidVariableName("$foo"));
assertFalse(Script.isValidVariableName("$foo/bar"));
assertFalse(Script.isValidVariableName("_foo"));
assertFalse(Script.isValidVariableName("_foo_"));
assertFalse(Script.isValidVariableName("0"));
assertFalse(Script.isValidVariableName("2foo"));
}
@Test
// Map-vs-map matching: exact keys, '#ignore' / '#notpresent' markers, nested maps and lists.
public void testMatchMapObjects() {
ScenarioContext ctx = getContext();
Map<String, Object> left = new HashMap<>();
left.put("foo", "bar");
Map<String, Object> right = new HashMap<>();
right.put("foo", "bar");
assertTrue(matchJsonObject(left, right, ctx).pass);
// '#ignore' on the expected side tolerates the key being absent on the actual side
right.put("baz", "#ignore");
assertTrue(matchJsonObject(left, right, ctx).pass);
right.put("baz", "#notpresent");
assertTrue(matchJsonObject(left, right, ctx).pass);
left.put("baz", Arrays.asList(1, 2, 3));
right.put("baz", Arrays.asList(1, 2, 3));
assertTrue(matchJsonObject(left, right, ctx).pass);
// list length mismatch fails
left.put("baz", Arrays.asList(1, 2));
assertFalse(matchJsonObject(left, right, ctx).pass);
Map<String, Object> leftChild = new HashMap<>();
leftChild.put("a", 1);
Map<String, Object> rightChild = new HashMap<>();
rightChild.put("a", 1);
left.put("baz", leftChild);
right.put("baz", rightChild);
assertTrue(matchJsonObject(left, right, ctx).pass);
List<Map> leftList = new ArrayList<>();
leftList.add(leftChild);
List<Map> rightList = new ArrayList<>();
rightList.add(rightChild);
left.put("baz", leftList);
right.put("baz", rightList);
assertTrue(matchJsonObject(left, right, ctx).pass);
// mismatch deep inside a list of maps fails, but '#ignore' rescues it
rightChild.put("a", 2);
assertFalse(matchJsonObject(left, right, ctx).pass);
rightChild.put("a", "#ignore");
assertTrue(matchJsonObject(left, right, ctx).pass);
}
@Test
// Two lists containing equal maps match; works even with a null context.
public void testMatchListObjects() {
List left = new ArrayList();
List right = new ArrayList();
Map<String, Object> leftChild = new HashMap<>();
leftChild.put("a", 1);
left.add(leftChild);
Map<String, Object> rightChild = new HashMap<>();
rightChild.put("a", 1);
right.add(rightChild);
assertTrue(matchJsonObject(left, right, null).pass);
}
@Test
// Matching against sub-paths of a JSON document var: leaf string, nested object,
// array element by index, and '#ignore' inside an expected array.
public void testMatchJsonPath() {
    DocumentContext doc = JsonPath.parse("{ foo: 'bar', baz: { ban: [1, 2, 3]} }");
    ScenarioContext ctx = getContext();
    ctx.vars.put("myJson", doc);
    ScriptValue myJson = ctx.vars.get("myJson");
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.foo", "'bar'", ctx).pass);
    // fixed: expected-JSON literals previously had a stray trailing '}' ("{ ban: [...]} }")
    // that only passed because of lenient parsing
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz", "{ ban: [1, 2, 3]}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz.ban[1]", "2", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz", "{ ban: [1, '#ignore', 3]}", ctx).pass);
}
@Test
// A JSON path that evaluates to a list can itself be matched by index and sub-path.
public void testMatchJsonPathThatReturnsList() {
ScenarioContext ctx = getContext();
ctx.vars.put("json", doc);
Script.assign("list", "json.foo", ctx);
ScriptValue list = ctx.vars.get("list");
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, list, "$[0]", "{ bar: 1}", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, list, "$[0].bar", "1", ctx).pass);
}
@Test
// A JsonPath filter expression on the left-hand side selects one element ('#[1]' asserts length 1).
public void testMatchJsonPathOnLeftHandSide() {
ScenarioContext ctx = getContext();
String json = "[\n"
+ " {\n"
+ " \"a\": \"a\",\n"
+ " \"b\": \"a\",\n"
+ " \"c\": \"a\",\n"
+ " },\n"
+ " {\n"
+ " \"a\": \"ab\",\n"
+ " \"b\": \"ab\",\n"
+ " \"c\": \"ab\",\n"
+ " }\n"
+ "]";
Script.assign("response", json, ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "response[?(@.b=='ab')]", null, "'#[1]'", ctx).pass);
}
@Test
// Exercises the full family of collection match types (EQUALS, CONTAINS, NOT_CONTAINS,
// CONTAINS_ONLY, CONTAINS_ANY, CONTAINS_DEEP, EACH_*) against an array of objects.
public void testMatchAllJsonPath() {
    DocumentContext doc = JsonPath.parse("{ foo: [{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]}");
    ScenarioContext ctx = getContext();
    ctx.vars.put("myJson", doc);
    ScriptValue myJson = ctx.vars.get("myJson");
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.NOT_CONTAINS, myJson, "$.foo", "[{bar: 1, baz: 'a'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.NOT_CONTAINS, myJson, "$.foo", "[{bar: 9, baz: 'z'}, {bar: 99, baz: 'zz'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ANY, myJson, "$.foo", "[{bar: 9, baz: 'z'}, {bar: 2, baz: 'b'}]", ctx).pass);
    // CONTAINS_DEEP tolerates each expected object being a subset of the actual one
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 2}, {bar:3}]", ctx).pass);
    // shuffle: CONTAINS_ONLY is order-independent but length-strict
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 2, baz: 'b'}, {bar:3, baz: 'c'}, {bar: 1, baz: 'a'}]", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_EQUALS, myJson, "$.foo", "{bar:'#number', baz:'#string'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_CONTAINS, myJson, "$.foo", "{bar:'#number'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_CONTAINS, myJson, "$.foo", "{baz:'#string'}", ctx).pass);
    // (a duplicate of the CONTAINS_DEEP assertion above was removed here)
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_NOT_CONTAINS, myJson, "$.foo", "{baz:'z'}", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.EACH_NOT_CONTAINS, myJson, "$.foo", "{baz:'a'}", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.EACH_EQUALS, myJson, "$.foo", "{bar:'#? _ < 3', baz:'#string'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{foo: [{bar: 1}, {bar: 2}, {bar:3}]}", ctx).pass);
}
@Test
// CONTAINS_DEEP semantics: expected arrays may be subsets (fewer elements, fewer keys per
// element), nested objects may be partial, and '#ignore' works at any depth.
public void testMatchContainsDeep() {
ScenarioContext ctx = getContext();
ctx.vars.put("myJson", doc);
ScriptValue myJson = ctx.vars.get("myJson");
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 2}, {bar:3}]", ctx).pass);
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 4}, {bar:3}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1, baz: 'a'}]", ctx).pass);
// an expected key that does not exist in the actual element fails
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1, baz: 'a', 'baq': 'b'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{baz: 'a'}, {bar: 2}, {bar:3, baz: 'c'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{baz: 'a'}, {bar:3, baz: 'c'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: '#ignore'}", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: { doo: {car: 1} } }", ctx).pass);
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: { doo: {car: 'a'} } }", ctx).pass);
}
@Test
// NOT_EQUALS is the strict negation of EQUALS, including against macros ('#[n]', '#regex',
// '#? ...'), nulls, variable references ('#(nope)'), and structural mismatches.
public void testMatchNotEquals() {
ScenarioContext ctx = getContext();
Script.assign("temp", "[1, 2]", ctx);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[1]'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[2]'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[]? _ > 2'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[]? _ > 0'", ctx).pass);
Script.assign("temp", "'foo'", ctx);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#regex .{2}'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#regex .{3}'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#? _.length == 2'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#? _.length == 3'", ctx).pass);
// null handling: EQUALS and NOT_EQUALS are exact opposites
Script.assign("json", "null", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "null", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "1", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "null", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", ctx).pass);
Script.assign("json", "1", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "null", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "null", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "1", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", ctx).pass);
// comparing against another variable ('#(nope)') and type/shape mismatches
Script.assign("nope", "{ foo: '#number' }", ctx);
Script.assign("json", "{ foo: 'bar' }", ctx);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#(^nope)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#(nope)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#array'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'foo'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[]", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'bar' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{}", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'blah' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'bar', baz: 'ban' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ foo: 'blah' }", ctx).pass);
Script.assign("json", "[{ foo: 'bar'}, { foo: 'baz' }]", ctx);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[{ foo: 'bar'}, { foo: 'baz' }]", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[{ foo: 'bar'}, { foo: 'blah' }]", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ foo: 'blah' }", ctx).pass);
}
@Test
// A JSON object produced by a JS function matches whether pre-assigned or called inline.
public void testMatchJsonObjectReturnedFromJs() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ return { foo: 'bar' } }", ctx);
Script.assign("json", "{ foo: 'bar' }", ctx);
Script.assign("expected", "fun()", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "expected", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "fun()", ctx).pass);
}
@Test
// Same as above but for a JS function returning a JSON array.
public void testMatchJsonArrayReturnedFromJs() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ return [ 'foo', 'bar', 'baz' ] }", ctx);
Script.assign("json", "[ 'foo', 'bar', 'baz' ]", ctx);
Script.assign("expected", "fun()", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "expected", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "fun()", ctx).pass);
}
@Test
// '$' and '$.path' on the left-hand side implicitly target the 'response' variable.
public void testMatchJsonPathOnResponse() {
DocumentContext doc = JsonPath.parse("{ foo: 'bar' }");
ScenarioContext ctx = getContext();
ctx.vars.put("response", doc);
assertTrue(Script.matchNamed(MatchType.EQUALS, "$", null, "{ foo: 'bar' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "$.foo", null, "'bar'", ctx).pass);
}
// Fixture payloads for testMatchTwoJsonDocsWithIgnores: EXPECTED mirrors ACTUAL with
// '#ignore' markers substituted for the generated ids and timestamps.
// static: these are constants — no need for a copy per test-class instance.
private static final String ACTUAL = "{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"bef90f66-bb57-4fea-83aa-a0acc42b0426\"},\"primaryId\":\"bef90f66-bb57-4fea-83aa-a0acc42b0426\",\"created\":{\"on\":\"2016-02-28T05:56:48.485+0000\"},\"lastUpdated\":{\"on\":\"2016-02-28T05:56:49.038+0000\"},\"organization\":{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"631fafe9-8822-4c82-b4a4-8735b202c16c\"},\"created\":{\"on\":\"2016-02-28T05:56:48.486+0000\"},\"lastUpdated\":{\"on\":\"2016-02-28T05:56:49.038+0000\"}},\"clientState\":\"ACTIVE\"}";
private static final String EXPECTED = "{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"#ignore\"},\"primaryId\":\"#ignore\",\"created\":{\"on\":\"#ignore\"},\"lastUpdated\":{\"on\":\"#ignore\"},\"organization\":{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"#ignore\"},\"created\":{\"on\":\"#ignore\"},\"lastUpdated\":{\"on\":\"#ignore\"}},\"clientState\":\"ACTIVE\"}";
@Test
// Matching one parsed document against another where '#ignore' markers blank out volatile fields.
public void testMatchTwoJsonDocsWithIgnores() {
DocumentContext actual = JsonPath.parse(ACTUAL);
DocumentContext expected = JsonPath.parse(EXPECTED);
ScenarioContext ctx = getContext();
ctx.vars.put("actual", actual);
ctx.vars.put("expected", expected);
ScriptValue act = ctx.vars.get("actual");
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, act, "$", "expected", ctx).pass);
}
@Test
// An XPath selecting a text node matches against a plain string.
public void testMatchXmlPathThatReturnsTextNode() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<root><foo>bar</foo><hello>world</hello></root>");
ctx.vars.put("myXml", doc);
ScriptValue myXml = ctx.vars.get("myXml");
assertTrue(Script.matchXml(MatchType.EQUALS, myXml, "/root/foo", "'bar'", ctx).pass);
assertTrue(Script.matchXml(MatchType.EQUALS, myXml, "/root/hello", "'world'", ctx).pass);
}
@Test
// An XPath selecting an element node matches against an XML fragment.
public void testMatchXmlPathThatReturnsXmlChunk() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<root><foo><bar>baz</bar></foo></root>");
ctx.vars.put("myXml", doc);
ScriptValue myXml = ctx.vars.get("myXml");
assertTrue(Script.matchXml(MatchType.EQUALS, myXml, "/root/foo", "<foo><bar>baz</bar></foo>", ctx).pass);
}
@Test
// An XPath that selects nothing fails an EQUALS match rather than throwing.
public void testMatchXmlPathThatReturnsNull() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo>bar</foo></root>", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "myXml//baz", null, "<baz>1</baz>", ctx).pass);
}
@Test
// Empty elements (<baz/> and <ban></ban>) match '', and '#present'/'#notpresent'
// distinguish existing elements from missing ones.
public void testMatchXmlEmptyAndNotPresent() {
ScenarioContext ctx = getContext();
Script.assign("xml", "<root><foo>bar</foo><baz/><ban></ban></root>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/baz", null, "''", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/ban", null, "''", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'#present'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'#notpresent'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/nope", null, "'#notpresent'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "xml/root/nope", null, "'#present'", ctx).pass);
}
@Test
// JSON presence markers: '#present', '#notpresent', '#ignore', and optional type
// markers ('##string', '##number') which pass when the key is absent.
public void testJsonEmptyAndNotPresent() {
ScenarioContext ctx = getContext();
Script.assign("json", "{ foo: 'bar' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#present'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#notpresent'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#notpresent'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#ignore'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#ignore'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'##string'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'##string'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'##number'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#present'", ctx).pass);
}
@Test
// '$var/xpath' on the right of an assignment extracts text from an XML variable.
public void testAssignAndMatchXmlText() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo>bar</foo></root>", ctx);
Script.assign("myStr", "$myXml/root/foo", ctx);
assertTrue(Script.assertBoolean("myStr == 'bar'", ctx).pass);
}
@Test
// '$var/xpath' extracting an element yields an XML chunk that can be matched as XML.
public void testAssignAndMatchXmlChunk() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", ctx);
Script.assign("myChunk", "$myXml/root/foo", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myChunk", null, "<foo><bar>baz</bar></foo>", ctx).pass);
}
@Test
// An extracted XML chunk matches another variable holding the same XML.
public void testAssignAndMatchXmlChunkByVariableReference() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", ctx);
Script.assign("myChunk", "$myXml/root/foo", ctx);
Script.assign("expected", "<foo><bar>baz</bar></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myChunk", null, "expected", ctx).pass);
}
@Test
// The 'var/xpath' form on the left-hand side of a match also selects an XML chunk.
public void testAssignAndMatchXmlPathChunk() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo", null, "<foo><bar>baz</bar></foo>", ctx).pass);
}
@Test
// An XPath node-list (repeated <bar> elements) matches a JSON string array.
public void testAssignAndMatchXmlPathThatReturnsNodeListAgainstJsonArray() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>one</bar><bar>two</bar></foo></root>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo/bar", null, "['one', 'two']", ctx).pass);
}
@Test
// An XPath node-list matches a Java list produced via 'get' + JsonPath projection.
public void testAssignAndMatchXmlPathThatReturnsNodeListAgainstList() {
ScenarioContext ctx = getContext();
Script.assign("myJson", "[{ val: 'one' }, { val: 'two' }]", ctx);
Script.assign("myList", "get myJson $[*].val", ctx);
Script.assign("myXml", "<root><foo><bar>one</bar><bar>two</bar></foo></root>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo/bar", null, "myList", ctx).pass);
}
@Test
// Re-assigning an XML var coerces it to a map; XPath matching still works against it.
public void testMatchXmlPathAutoConvertingFromMap() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", ctx);
Script.assign("myMap", "myXml", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myMap/root/foo", null, "<foo><bar>baz</bar></foo>", ctx).pass);
}
@Test
// 'get var /xpath' works identically whether the var is an XML doc or its map form.
public void testEvalXmlPathAutoConvertingFromMap() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", ctx);
Script.assign("myMap", "myXml", ctx);
Script.assign("temp", "get myXml /root/foo", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar>baz</bar></foo>", ctx).pass);
Script.assign("temp", "get myMap /root/foo", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar>baz</bar></foo>", ctx).pass);
}
@Test
// A bare '//xpath' left-hand side targets 'response'; the resulting node-list can be
// assigned, matched with CONTAINS, and re-extracted from a map-converted copy via 'get'.
public void testAssignXmlPathThatReturnsListThenMatch() {
ScenarioContext ctx = getContext();
Script.assign("response", XmlUtilsTest.TEACHERS_XML, ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "//teacher[@department='science']/subject", null, "['math', 'physics']", ctx).pass);
Script.assign("subjects", "//teacher[@department='science']/subject", ctx);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "subjects", null, "['physics', 'math']", ctx).pass);
Script.assign("teachers", "response", ctx); // becomes a map
Script.assign("subjects", "get teachers //teacher[@department='science']/subject", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "subjects", null, "['math', 'physics']", ctx).pass);
}
@Test
// JsonPath applied to a string-typed 'response' auto-converts it to JSON first;
// both '$response.foo' and the shorthand '$.foo' forms work.
public void testRunningJsonPathOnStringAutoConvertsStringToJson() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.STRING, "response", "{ foo: { hello: 'world' } }", ctx, true);
Script.assign("foo", "$response.foo", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ hello: 'world' }", ctx).pass);
Script.assign("foo", "$.foo", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ hello: 'world' }", ctx).pass);
}
@Test
// AssignType.STRING serializes a JSON variable to its compact string form.
public void testCastJsonToString() {
ScenarioContext ctx = getContext();
Script.assign("myJson", "{ root: { foo: 'bar' } }", ctx);
Script.assign(AssignType.STRING, "myString", "myJson", ctx, true);
ScriptValue value = ctx.vars.get("myString");
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("{\"root\":{\"foo\":\"bar\"}}", value.getAsString());
}
@Test
// AssignType.JSON parses a JSON string variable into a JSON document.
public void testCastStringToJson() {
ScenarioContext ctx = getContext();
Script.assign("myString", "{\"root\":{\"foo\":\"bar\"}}", ctx);
Script.assign(AssignType.JSON, "myJson", "myString", ctx, true);
ScriptValue value = ctx.vars.get("myJson");
assertEquals(ScriptValue.Type.JSON, value.getType());
assertEquals("{\"root\":{\"foo\":\"bar\"}}", value.getAsString());
}
@Test
// AssignType.XML converts a JSON variable into the equivalent XML document.
public void testCastJsonToXml() {
ScenarioContext ctx = getContext();
Script.assign("myJson", "{ root: { foo: 'bar' } }", ctx);
Script.assign(AssignType.XML, "myXml", "myJson", ctx, true);
ScriptValue value = ctx.vars.get("myXml");
assertEquals(ScriptValue.Type.XML, value.getType());
assertEquals("<root><foo>bar</foo></root>", value.getAsString());
}
@Test
// AssignType.XML parses a raw XML string into an XML document.
public void testCastStringToXml() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.STRING, "myString", "<root><foo>bar</foo></root>", ctx, true);
Script.assign(AssignType.XML, "myXml", "myString", ctx, true);
ScriptValue value = ctx.vars.get("myXml");
assertEquals(ScriptValue.Type.XML, value.getType());
assertEquals("<root><foo>bar</foo></root>", value.getAsString());
}
@Test
// AssignType.XML_STRING serializes an XML document back into a string.
public void testCastXmlToString() {
ScenarioContext ctx = getContext();
Script.assign("myXml", "<root><foo>bar</foo></root>", ctx);
Script.assign(AssignType.XML_STRING, "myString", "myXml", ctx, true);
ScriptValue value = ctx.vars.get("myString");
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("<root><foo>bar</foo></root>", value.getValue());
}
@Test
// A Java bean (SimplePojo with String foo / int bar fields) converts to JSON with defaults.
public void testCastPojoToJson() {
ScenarioContext ctx = getContext();
Script.assign("pojo", "new com.intuit.karate.SimplePojo()", ctx);
Script.assign(AssignType.JSON, "json", "pojo", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: null, bar: 0 }", ctx).pass);
}
@Test
// The same bean converts to XML; the null String field becomes an empty element.
public void testCastPojoToXml() {
ScenarioContext ctx = getContext();
Script.assign("pojo", "new com.intuit.karate.SimplePojo()", ctx);
Script.assign(AssignType.XML, "xml", "pojo", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo></foo><bar>0</bar></root>", ctx).pass);
}
@Test
// The left-hand-side shortcuts 'response/', bare 'response' and bare '/' are all
// equivalent ways of matching the whole XML response.
public void testXmlShortCutsForResponse() {
ScenarioContext ctx = getContext();
Script.assign("response", "<root><foo>bar</foo></root>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "response", "/", "<root><foo>bar</foo></root>", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "response/", null, "<root><foo>bar</foo></root>", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "response", null, "<root><foo>bar</foo></root>", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "/", null, "<root><foo>bar</foo></root>", ctx).pass);
}
@Test
// XML vars can be navigated with either XPath (1-based index) or JsonPath (0-based index).
public void testMatchXmlButUsingJsonPath() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<cat><name>Billie</name><scores><score>2</score><score>5</score></scores></cat>");
ctx.vars.put("myXml", doc);
assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/cat/scores/score[2]", null, "'5'", ctx).pass);
// using json path for xml !
assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml.cat.scores.score[1]", null, "'5'", ctx).pass);
}
@Test
// XML_STRING conversion: matching as XML ignores attribute order, but the raw string
// form has attributes alphabetized by the DOM parser.
public void testXmlStringConversion() {
ScenarioContext ctx = getContext();
Script.assign("response", "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", ctx);
Script.assign(AssignType.XML_STRING, "temp", "response", ctx, false);
assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", ctx).pass);
// XML DOM parsing unfortunately re-orders attributes
assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "'<foo><bar aaa=\"1\" bbb=\"2\"/></foo>'", ctx).pass);
}
@Test
// karate.prettyXml() output can be re-assigned as XML or kept as a string; the string
// form again shows DOM-alphabetized attributes.
public void testXmlStringConversionInJs() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.AUTO, "response", "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", ctx, false);
Script.assign(AssignType.XML, "xml", "karate.prettyXml(response)", ctx, false);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", ctx).pass);
Script.assign(AssignType.AUTO, "temp", "karate.prettyXml(response)", ctx, false);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "temp", null, "'<bar aaa=\"1\" bbb=\"2\"/>'", ctx).pass);
}
@Test
// Repeated sibling elements are addressable by 1-based XPath index.
public void testMatchXmlRepeatedElements() {
ScenarioContext ctx = getContext();
String xml = "<foo><bar>baz1</bar><bar>baz2</bar></foo>";
Document doc = XmlUtils.toXmlDoc(xml);
ctx.vars.put(ScriptValueMap.VAR_RESPONSE, doc);
ScriptValue response = ctx.vars.get(ScriptValueMap.VAR_RESPONSE);
assertTrue(Script.matchXml(MatchType.EQUALS, response, "/", "<foo><bar>baz1</bar><bar>baz2</bar></foo>", ctx).pass);
assertTrue(Script.matchXml(MatchType.EQUALS, response, "/foo/bar[2]", "'baz2'", ctx).pass);
assertTrue(Script.matchXml(MatchType.EQUALS, response, "/foo/bar[1]", "'baz1'", ctx).pass);
}
@Test
// On an attribute mismatch, the failure message pinpoints the attribute path (/hello/@foo).
public void testMatchXmlAttributeErrorReporting() {
ScenarioContext ctx = getContext();
Script.assign("xml", "<hello foo=\"bar\">world</hello>", ctx);
ScriptValue xml = ctx.vars.get("xml");
assertTrue(Script.matchXml(MatchType.EQUALS, xml, "/", "<hello foo=\"bar\">world</hello>", ctx).pass);
AssertionResult ar = Script.matchXml(MatchType.EQUALS, xml, "/", "<hello foo=\"baz\">world</hello>", ctx);
assertFalse(ar.pass);
assertTrue(ar.message.contains("/hello/@foo"));
}
@Test
// A called JS function returning a map has its entries merged into the context vars.
public void testAssigningAndCallingFunctionThatUpdatesVars() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(){ return { bar: 'baz' } }", ctx);
ScriptValue testFoo = ctx.vars.get("foo");
assertEquals(ScriptValue.Type.JS_FUNCTION, testFoo.getType());
Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", null, ctx);
ScriptValue testBar = ctx.vars.get("bar");
assertEquals("baz", testBar.getValue());
}
@Test
// 'call fun' on the right-hand side of an assignment captures the function's return value.
public void testAssigningAndCallingFunctionThatCanBeUsedToAssignVariable() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(){ return 'world' }", ctx);
Script.assign("hello", "call foo", ctx);
ScriptValue hello = ctx.vars.get("hello");
assertEquals("world", hello.getValue());
}
@Test
// 'call fun arg' passes a literal argument to the function before assignment.
public void testAssigningAndCallingFunctionWithArgumentsThatCanBeUsedToAssignVariable() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(pre){ return pre + ' world' }", ctx);
Script.assign("hello", "call foo 'hello'", ctx);
ScriptValue hello = ctx.vars.get("hello");
assertEquals("hello world", hello.getValue());
}
@Test
// callAndUpdate... with a primitive (string) argument; returned map updates vars.
public void testCallingFunctionThatTakesPrimitiveArgument() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(a){ return { bar: a } }", ctx);
ScriptValue testFoo = ctx.vars.get("foo");
assertEquals(ScriptValue.Type.JS_FUNCTION, testFoo.getType());
Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", "'hello'", ctx);
ScriptValue testBar = ctx.vars.get("bar");
assertEquals("hello", testBar.getValue());
}
@Test
// callAndUpdate... with a JSON argument the function dereferences ('a.hello').
public void testCallingFunctionThatTakesJsonArgument() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(a){ return { bar: a.hello } }", ctx);
ScriptValue testFoo = ctx.vars.get("foo");
assertEquals(ScriptValue.Type.JS_FUNCTION, testFoo.getType());
Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", "{ hello: 'world' }", ctx);
ScriptValue testBar = ctx.vars.get("bar");
assertEquals("world", testBar.getValue());
}
@Test
// 'call fun [...]' passes a JSON array literal as the single argument.
public void testCallingFunctionWithJsonArray() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(a){ return a[0] }", ctx);
Script.assign("bar", "call foo ['hello']", ctx);
ScriptValue bar = ctx.vars.get("bar");
assertEquals("hello", bar.getValue());
}
@Test
// 'call fun varName' passes a previously assigned list variable as the argument.
public void testCallingFunctionWithJavaList() {
ScenarioContext ctx = getContext();
Script.assign("foo", "function(a){ return a[0] }", ctx);
Script.assign("bar", "['hello']", ctx);
Script.assign("baz", "call foo bar", ctx);
ScriptValue baz = ctx.vars.get("baz");
assertEquals("hello", baz.getValue());
}
@Test
// karate.get() inside a JS function resolves both '$var.path' JsonPath expressions
// and plain variable names.
public void testCallingFunctionThatUsesJsonPath() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", ctx);
Script.assign("fun", "function(){ return karate.get('$foo.bar[*].baz') }", ctx);
Script.assign("res", "call fun", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2, 3]", ctx).pass);
// 'normal' variable name
Script.assign("fun", "function(){ return karate.get('foo') }", ctx);
Script.assign("res", "call fun", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", ctx).pass);
}
@Test
// A JSON array returned by one function can be fed as the argument to another.
public void testCallingFunctionWithJsonArrayReturnedFromAnotherFunction() {
ScenarioContext ctx = getContext();
Script.assign("fun1", "function(){ return [1, 2, 3] }", ctx);
Script.assign("res1", "call fun1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "[1, 2, 3]", ctx).pass);
Script.assign("fun2", "function(arg){ return arg.length }", ctx);
Script.assign("res2", "call fun2 res1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "3", ctx).pass);
}
@Test
// A JSON object returned by one function can be fed as the argument to another.
public void testCallingFunctionWithJsonReturnedFromAnotherFunction() {
ScenarioContext ctx = getContext();
Script.assign("fun1", "function(){ return { foo: 'bar' } }", ctx);
Script.assign("res1", "call fun1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "{ foo: 'bar' }", ctx).pass);
Script.assign("fun2", "function(arg){ return arg.foo }", ctx);
Script.assign("res2", "call fun2 res1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "'bar'", ctx).pass);
}
@Test
// A string returned by one function can be fed as the argument to another.
public void testCallingFunctionWithStringReturnedFromAnotherFunction() {
ScenarioContext ctx = getContext();
Script.assign("fun1", "function(){ return 'foo' }", ctx);
Script.assign("res1", "call fun1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "'foo'", ctx).pass);
Script.assign("fun2", "function(arg){ return arg + 'bar' }", ctx);
Script.assign("res2", "call fun2 res1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "'foobar'", ctx).pass);
}
@Test
// read() inside a JS function loads a classpath JSON file and returns it as JSON.
public void testJsonReturnedFromJsRead() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ return read('classpath:test.json') }", ctx);
Script.assign("val", "call fun", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "val", null, "{ foo: 'bar' }", ctx).pass);
}
@Test
public void testJsonFromJsRead() {
    // a value obtained via read() inside JS can be navigated before being returned
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ var temp = read('classpath:test.json'); return temp.foo == 'bar'; }", context);
    Script.assign("val", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "val", null, "true", context).pass);
}
@Test
public void testParsingVariableAndJsonPath() {
// splits a karate LHS expression into (variable name, path):
// dot / bracket suffixes become a json-path rooted at '$',
// while a '/'-separated suffix is kept verbatim as an xpath string
assertEquals(StringUtils.pair("foo", "$"), Script.parseVariableAndPath("foo"));
assertEquals(StringUtils.pair("foo", "$.bar"), Script.parseVariableAndPath("foo.bar"));
assertEquals(StringUtils.pair("foo", "$['bar']"), Script.parseVariableAndPath("foo['bar']"));
assertEquals(StringUtils.pair("foo", "$[0]"), Script.parseVariableAndPath("foo[0]"));
assertEquals(StringUtils.pair("foo", "$[0].bar"), Script.parseVariableAndPath("foo[0].bar"));
assertEquals(StringUtils.pair("foo", "$[0]['bar']"), Script.parseVariableAndPath("foo[0]['bar']"));
assertEquals(StringUtils.pair("foo", "/bar"), Script.parseVariableAndPath("foo/bar"));
assertEquals(StringUtils.pair("foo", "/"), Script.parseVariableAndPath("foo/"));
assertEquals(StringUtils.pair("foo", "/bar/baz[1]/ban"), Script.parseVariableAndPath("foo/bar/baz[1]/ban"));
}
@Test
public void testSetValueOnVariableByPath() {
// exercises Script.setValueByPath for json and xml targets: in-place updates,
// auto-creation of missing parent nodes, and the '$[]' append-to-array syntax
ScenarioContext ctx = getContext();
// json: update a key, null it out, re-set it, then grow an array by index
Script.assign("json", "{ foo: 'bar' }", ctx);
Script.setValueByPath("json", "$.foo", "'hello'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'hello' }", ctx).pass);
Script.setValueByPath("json.foo", null, "null", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: null }", ctx).pass);
Script.setValueByPath("json.foo", null, "'world'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world' }", ctx).pass);
Script.setValueByPath("json.bar[0]", null, "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [1] }", ctx).pass);
Script.setValueByPath("json.bar[0]", null, "2", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [2] }", ctx).pass);
Script.setValueByPath("json.bar[1]", null, "3", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [2, 3] }", ctx).pass);
// json key that needs to be within quotes (bracket notation for non-identifier keys)
Script.assign("json", "{ 'bad-name': 'foo' }", ctx);
Script.setValueByPath("json", "$['bad-name']", "'bar'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ 'bad-name': 'bar' }", ctx).pass);
// json where parent nodes are built automatically
Script.assign("json", "{}", ctx);
Script.setValueByPath("json", "$.foo.bar", "'hello'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: { bar: 'hello' } }", ctx).pass);
Script.assign("json", "[]", ctx);
Script.setValueByPath("json", "$[0].a[0].c", "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[{a:[{c:1}]}]", ctx).pass);
// json append to arrays via the trailing '[]' (creates the array if absent)
Script.assign("json", "[]", ctx);
Script.setValueByPath("json", "$[]", "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[1]", ctx).pass);
Script.assign("json", "{ a: [] }", ctx);
Script.setValueByPath("json", "$.a[]", "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1] }", ctx).pass);
Script.assign("json", "{}", ctx);
Script.setValueByPath("json", "$.a[]", "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1] }", ctx).pass);
Script.assign("json", "{ a: [1] }", ctx);
Script.setValueByPath("json", "$.a[]", "2", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1, 2] }", ctx).pass);
// xml: update, null-out (leaves an empty element) and re-set
Script.assign("xml", "<root><foo>bar</foo></root>", ctx);
Script.setValueByPath("xml", "/root/foo", "'hello'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>hello</foo></root>", ctx).pass);
Script.setValueByPath("xml/root/foo", null, "null", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo/></root>", ctx).pass);
Script.setValueByPath("xml/root/foo", null, "'world'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>world</foo></root>", ctx).pass);
// xml where parent nodes are built automatically (elements, attributes, indexed siblings)
Script.assign("xml", "<root/>", ctx);
Script.setValueByPath("xml", "/root/foo", "'hello'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>hello</foo></root>", ctx).pass);
Script.assign("xml", "<root/>", ctx);
Script.setValueByPath("xml/root/foo/@bar", null, "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo bar=\"1\"/></root>", ctx).pass);
Script.setValueByPath("xml/root/foo[2]", null, "1", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo bar=\"1\"/><foo>1</foo></root>", ctx).pass);
}
@Test
public void testSetXmlChunkAutoConversion() {
    // setting an xml-typed variable into an xml path splices in the chunk as a node, not a string
    ScenarioContext context = getContext();
    Script.assign("xml", "<foo><bar></bar></foo>", context);
    Script.assign("chunk", "<hello>world</hello>", context);
    Script.setValueByPath("xml", "/foo/bar", "chunk", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar><hello>world</hello></bar></foo>", context).pass);
}
@Test
public void testDeleteValueOnVariableByPath() {
    ScenarioContext context = getContext();
    // json: remove a key
    Script.assign("json", "{ foo: 'bar', baz: 'ban' }", context);
    Script.removeValueByPath("json", "$.baz", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar' }", context).pass);
    // re-add as an array, then remove a single element by index
    Script.setValueByPath("json.baz", null, "[1, 2, 3]", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar', baz: [1, 2, 3] }", context).pass);
    Script.removeValueByPath("json", "$.baz[1]", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar', baz: [1, 3] }", context).pass);
    // xml: remove an element
    Script.assign("xml", "<root><foo>bar</foo></root>", context);
    Script.removeValueByPath("xml", "/root/foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root/>", context).pass);
    // xml: remove an attribute
    Script.assign("xml", "<root hello=\"world\"><foo>bar</foo></root>", context);
    Script.removeValueByPath("xml", "/root/@hello", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>bar</foo></root>", context).pass);
}
@Test
public void testCallJsFunctionWithMap() {
    // a json variable passed to a JS function round-trips intact
    ScenarioContext context = getContext();
    Script.assign("json", "{ foo: 'bar', hello: 'world' }", context);
    Script.assign("fun", "function(o){ return o }", context);
    Script.assign("res", "call fun json", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "json", context).pass);
}
@Test
public void testDefaultValidators() {
// built-in validator macros: #ignore, #null / #notnull, #regex, #uuid, #number,
// and the '#? expr' self-predicate form (where '_' is the actual value)
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar' }");
ctx.vars.put("json", doc);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#ignore' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#notnull' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#null' }", ctx).pass);
// #regex works with or without a space before the pattern
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex^bar' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex ^bar' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex^baX' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex ^baX' }", ctx).pass);
// null actual: #null and #ignore pass, #notnull fails
doc = JsonUtils.toJsonDoc("{ foo: null }");
ctx.vars.put("json", doc);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#ignore' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#null' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#notnull' }", ctx).pass);
// #uuid requires a full, well-formed uuid
doc = JsonUtils.toJsonDoc("{ foo: 'a9f7a56b-8d5c-455c-9d13-808461d17b91' }");
ctx.vars.put("json", doc);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#uuid' }", ctx).pass);
doc = JsonUtils.toJsonDoc("{ foo: 'a9f7a56b-8d5c-455c-9d13' }");
ctx.vars.put("json", doc);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#uuid' }", ctx).pass);
// '#? expr' predicates, including references to other variables (min / max)
doc = JsonUtils.toJsonDoc("{ foo: 5 }");
ctx.vars.put("json", doc);
ctx.vars.put("min", 4);
ctx.vars.put("max", 6);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#number' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ == 5' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ < 6' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > 4' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > 4 && _ < 6' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > min && _ < max' }", ctx).pass);
}
@Test
public void testStringThatStartsWithHashSymbol() {
    // a value made only of '#' characters must not be mistaken for a validator macro
    ScenarioContext context = getContext();
    Script.assign("foo", "{ bar: '#####' }", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#####' }", context).pass);
}
@Test
public void testSimpleJsonMatch() {
    // an empty expected object must not equal a non-empty actual
    ScenarioContext context = getContext();
    DocumentContext jsonDoc = JsonUtils.toJsonDoc("{ foo: 'bar' }");
    context.vars.put("json", jsonDoc);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ }", context).pass);
}
@Test
public void testAssignJsonChunkObjectAndUse() {
    // a chunk extracted from a parent json remains addressable by path
    ScenarioContext context = getContext();
    Script.assign("parent", "{ foo: 'bar', 'ban': { a: 1 } }", context);
    Script.assign("child", "parent.ban", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "child.a", null, "1", context).pass);
    // same, but with an array nested inside the chunk
    Script.assign("parent", "{ foo: 'bar', 'ban': { a: [1, 2, 3] } }", context);
    Script.assign("child", "parent.ban", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "child.a[1]", null, "2", context).pass);
}
@Test
public void testAssignJsonChunkListAndUse() {
    // a list-valued chunk supports whole-value, list and indexed matching
    ScenarioContext context = getContext();
    Script.assign("parent", "{ foo: { bar: [{ baz: 1}, {baz: 2}, {baz: 3}] }}", context);
    Script.assign("child", "parent.foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "child", null, "{ bar: [{ baz: 1}, {baz: 2}, {baz: 3}]}", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "child.bar", null, "[{ baz: 1}, {baz: 2}, {baz: 3}]", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "child.bar[0]", null, "{ baz: 1}", context).pass);
}
@Test
public void testEvalUrl() {
    // a single-quoted url expression evaluates to its plain string value
    ScenarioContext context = getContext();
    ScriptValue evaluated = Script.evalKarateExpression("'http://localhost:8089/v1/cats'", context);
    assertEquals("http://localhost:8089/v1/cats", evaluated.getAsString());
}
@Test
public void testEvalParamWithDot() {
    // a dotted string literal must evaluate as-is, not as a variable path
    ScenarioContext context = getContext();
    ScriptValue evaluated = Script.evalKarateExpression("'ACS.Itself'", context);
    assertEquals("ACS.Itself", evaluated.getAsString());
}
@Test
public void testMatchHandlesNonStringNullsGracefully() {
    // comparing a null value against a list must fail the match, not throw
    ScenarioContext context = getContext();
    Script.assign("json", "{ foo: null }", context);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "[]", context).pass);
}
@Test
public void testMatchJsonObjectContains() {
    // EQUALS ignores key order; CONTAINS accepts any subset of the actual keys
    ScenarioContext context = getContext();
    Script.assign("json", "{ foo: 'bar', baz: [1, 2, 3] }", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ baz: [1, 2, 3], foo: 'bar' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ baz: [1, 2, 3] }", context).pass);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ foo: 'bar' }", context).pass);
}
@Test
public void testMatchJsonObjectPartialNotContains() {
    // NOT_CONTAINS passes as long as at least one expected entry differs
    ScenarioContext context = getContext();
    Script.assign("json", "{ a: 1, b: 2}", context);
    assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: 1, b: 3 }", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: 1, b: '#string' }", context).pass);
}
@Test
public void testMatchJsonArrayContains() {
    // CONTAINS on an array accepts a sub-list of its elements
    ScenarioContext context = getContext();
    Script.assign("foo", "{ bar: [1, 2, 3] }", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.bar", null, "[1 ,2, 3]", context).pass);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo.bar", null, "[1]", context).pass);
}
@Test
public void testMatchContainsForSingleElements() {
    // a bare (non-list) expected value is treated as a one-element contains
    ScenarioContext context = getContext();
    Script.assign("foo", "{ bar: [1, 2, 3] }", context);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo.bar", null, "1", context).pass);
    Script.assign("json", "[{ foo: 1 }, { foo: 2 }, { foo: 3 }]", context);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ foo: 1 }", context).pass);
    Script.assign("json", "[{ foo: 1 }]", context);
    assertTrue(Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "{ foo: 1 }", context).pass);
}
@Test
public void testMatchJsonObjectErrorReporting() {
    // a failed object match should report the unmatched key count
    ScenarioContext context = getContext();
    Script.assign("json", "{ a: 1, b: 2, c: 3}", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: 1, c: 3 }", context);
    assertFalse(result.pass);
    assertTrue(result.message.contains("actual value has 1 more key"));
}
@Test
public void testMatchJsonArrayErrorReporting() {
    ScenarioContext context = getContext();
    Script.assign("json", "[{ foo: 1 }, { foo: 2 }, { foo: 3 }]", context);
    // an EQUALS failure reports the first differing value
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "json", null, "[{ foo: 1 }, { foo: 2 }, { foo: 4 }]", context);
    assertFalse(result.pass);
    assertTrue(result.message.contains("actual: 3, expected: 4"));
    // CONTAINS / CONTAINS_ONLY failures point at the array wildcard path
    result = Script.matchNamed(MatchType.CONTAINS, "json", null, "[{ foo: 1 }, { foo: 2 }, { foo: 4 }]", context);
    assertFalse(result.pass);
    assertTrue(result.message.contains("$[*]"));
    result = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }, { foo: 0 }]", context);
    assertFalse(result.pass);
    assertTrue(result.message.contains("$[*]"));
    // CONTAINS_ONLY is order-insensitive but size-sensitive
    result = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }, { foo: 1 }]", context);
    assertTrue(result.pass);
    result = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }]", context);
    assertFalse(result.pass);
    assertTrue(result.message.contains("not the same size"));
}
@Test
public void testMatchStringEqualsAndContains() {
// string equality and substring containment, plus their negated forms
ScenarioContext ctx = getContext();
Script.assign("foo", "'hello world'", ctx);
// NOTE(review): disabled assertion below — presumably redundant with the
// assertFalse(NOT_EQUALS, 'hello world') check further down; confirm before removing
// assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'hello world'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "'blah'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'blah'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "'hello world'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo", null, "'hello'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "'zoo'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "foo", null, "'blah'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_CONTAINS, "foo", null, "'world'", ctx).pass);
}
@Test
public void testKarateEnvAccessFromScript() {
// karate.env inside JS reflects the env the FeatureContext was built with,
// and is null when no env was configured
FeatureContext featureContext = FeatureContext.forEnv("baz");
CallContext callContext = new CallContext(null, true);
ScenarioContext ctx = new ScenarioContext(featureContext, callContext, null, null);
Script.assign("foo", "function(){ return karate.env }", ctx);
Script.assign("bar", "call foo", ctx);
ScriptValue bar = ctx.vars.get("bar");
assertEquals("baz", bar.getValue());
// null (a fresh context built without an env)
featureContext = FeatureContext.forEnv();
ctx = new ScenarioContext(featureContext, callContext, null, null);
Script.assign("foo", "function(){ return karate.env }", ctx);
Script.assign("bar", "call foo", ctx);
bar = ctx.vars.get("bar");
assertNull(bar.getValue());
}
@Test
public void testCallingFeatureWithNoArgument() {
    // variables defined in the called feature become fields of the call result
    ScenarioContext context = getContext();
    Script.assign("foo", "call read('test-called.feature')", context);
    ScriptValue valueA = Script.evalJsonPathOnVarByName("foo", "$.a", context);
    assertEquals(1, valueA.getValue());
    ScriptValue valueB = Script.evalJsonPathOnVarByName("foo", "$.b", context);
    assertEquals(2, valueB.getValue());
}
@Test
public void testCallingFeatureWithVarOverrides() {
    // an inline json argument is merged into the called feature's variables
    ScenarioContext context = getContext();
    Script.assign("foo", "call read('test-called.feature') { c: 3 }", context);
    ScriptValue valueA = Script.evalJsonPathOnVarByName("foo", "$.a", context);
    assertEquals(1, valueA.getValue());
    ScriptValue valueB = Script.evalJsonPathOnVarByName("foo", "$.b", context);
    assertEquals(2, valueB.getValue());
    ScriptValue valueC = Script.evalJsonPathOnVarByName("foo", "$.c", context);
    assertEquals(3, valueC.getValue());
}
@Test
public void testCallingFeatureWithVarOverrideFromVariable() {
    // the call argument can be an existing variable instead of an inline literal
    ScenarioContext context = getContext();
    Script.assign("bar", "{ c: 3 }", context);
    Script.assign("foo", "call read('test-called.feature') bar", context);
    ScriptValue valueA = Script.evalJsonPathOnVarByName("foo", "$.a", context);
    assertEquals(1, valueA.getValue());
    ScriptValue valueB = Script.evalJsonPathOnVarByName("foo", "$.b", context);
    assertEquals(2, valueB.getValue());
    ScriptValue valueC = Script.evalJsonPathOnVarByName("foo", "$.c", context);
    assertEquals(3, valueC.getValue());
}
@Test
public void testCallingFeatureWithList() {
    // a json array argument invokes the feature once per element, collecting the results
    ScenarioContext context = getContext();
    Script.assign("foo", "call read('test-called.feature') [{c: 100}, {c: 200}, {c: 300}]", context);
    ScriptValue first = Script.evalJsonPathOnVarByName("foo", "$[0].c", context);
    assertEquals(100, first.getValue());
    ScriptValue second = Script.evalJsonPathOnVarByName("foo", "$[1].c", context);
    assertEquals(200, second.getValue());
    ScriptValue third = Script.evalJsonPathOnVarByName("foo", "$[2].c", context);
    assertEquals(300, third.getValue());
}
@Test
public void testCallingFeatureThatEvaluatesEmbeddedExpressions() {
    // embedded expressions in the called feature see the caller-supplied argument
    ScenarioContext context = getContext();
    Script.assign("result", "call read('test-called-embedded.feature') { foo: 'world' }", context);
    ScriptValue jsonValue = Script.evalJsonPathOnVarByName("result", "$.json.hello", context);
    assertEquals("world", jsonValue.getValue());
    ScriptValue xmlValue = Script.evalJsonPathOnVarByName("result", "$.xml.hello", context);
    assertEquals("world", xmlValue.getValue());
}
@Test
public void testCallingFeatureThatEvaluatesEmbeddedExpressionsFromFileRead() {
    // same as above, but the embedded expressions live in files read by the feature
    ScenarioContext context = getContext();
    Script.assign("result", "call read('test-called-embedded-file.feature') { foo: 'world' }", context);
    ScriptValue jsonValue = Script.evalJsonPathOnVarByName("result", "$.json.hello", context);
    assertEquals("world", jsonValue.getValue());
    ScriptValue xmlValue = Script.evalJsonPathOnVarByName("result", "$.xml.hello", context);
    assertEquals("world", xmlValue.getValue());
}
@Test
public void testCallingFeatureWithJsonCreatedByJavaScript() {
    // json built by a JS function is a valid call argument for a feature
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return { c: 100} }", context);
    Script.assign("res", "call fun", context);
    Script.assign("foo", "call read('test-called.feature') res", context);
    ScriptValue valueC = Script.evalJsonPathOnVarByName("foo", "$.c", context);
    assertEquals(100, valueC.getValue());
}
@Test
public void testCallingFeatureWithJsonArrayCreatedByJavaScript() {
    // a json array built by a JS function drives a per-element feature call
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return [{ c: 100}] }", context);
    Script.assign("res", "call fun", context);
    Script.assign("foo", "call read('test-called.feature') res", context);
    ScriptValue valueC = Script.evalJsonPathOnVarByName("foo", "$[0].c", context);
    assertEquals(100, valueC.getValue());
}
@Test
public void testSetOnJsonArrayCreatedByJavaScript() {
    // 'set' by path works on an array that originated from a JS function
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return [{a: 1}, {a: 2}, {b: 3}] }", context);
    Script.assign("json", "call fun", context);
    Script.setValueByPath("json[1].a", null, "5", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[{a: 1}, {a: 5}, {b: 3}]", context).pass);
}
@Test
public void testGetSyntaxForJson() {
// the 'get' keyword: whole-result get, indexed get[n], an explicit source
// variable with its own json-path, and the implicit 'response' variable
ScenarioContext ctx = getContext();
Script.assign("foo", "[{baz: 1}, {baz: 2}, {baz: 3}]", ctx);
Script.assign("nums", "get foo[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
Script.assign("first", "get[0] foo[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "first", null, "1", ctx).pass);
Script.assign("second", "get[1] foo[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "second", null, "2", ctx).pass);
// alternative to get, usable in-line within match statements ('$var.path' form)
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[*].baz", null, "$foo[*].baz", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "$foo[*].baz", null, "$foo[*].baz", ctx).pass);
Script.assign("foo", "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", ctx);
Script.assign("nums", "get foo.bar[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.bar[*].baz", null, "$foo.bar[*].baz", ctx).pass);
// 'get <var> <path>' form: source variable followed by a '$'-rooted path
Script.assign("nums", "get foo $.bar[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
// a bare '$' path defaults to the 'response' variable
Script.assign("response", "[{baz: 1}, {baz: 2}, {baz: 3}]", ctx);
Script.assign("second", "get[1] $[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "second", null, "2", ctx).pass);
Script.assign("third", "get[2] response $[*].baz", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "third", null, "3", ctx).pass);
}
@Test
public void testGetSyntaxForXml() {
    // 'get' with an xpath function (count) returns the computed number
    ScenarioContext context = getContext();
    Script.assign("foo", "<records>\n <record>a</record>\n <record>b</record>\n <record>c</record>\n</records>", context);
    Script.assign("count", "get foo count(//record)", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "count", null, "3", context).pass);
}
@Test
public void testFromJsKarateCallFeatureWithNoArg() {
    // karate.call() from JS returns the called feature's variables
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return karate.call('test-called.feature') }", context);
    Script.assign("res", "fun()", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res.a", null, "1", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res.b", null, "2", context).pass);
}
@Test
public void testFromJsKarateCallFeatureWithJsonArg() {
    // karate.call() with a json argument merges it into the called feature's variables
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return karate.call('test-called.feature', {c: 3}) }", context);
    Script.assign("res", "fun()", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res.a", null, "1", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res.b", null, "2", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res.c", null, "3", context).pass);
}
@Test
public void testFromJsKarateGetForNonExistentVariable() {
    // karate.get() of an unknown name yields a falsy value instead of an error
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ var foo = karate.get('foo'); return foo ? true : false }", context);
    Script.assign("res", "fun()", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "false", context).pass);
}
@Test
public void testFromJsKarateGetForJsonArrayVariable() {
    // a JS array returned via 'call' matches a json array literal
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return [1, 2, 3] }", context);
    Script.assign("res", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2, 3]", context).pass);
}
@Test
public void testFromJsKarateGetForJsonObjectVariableAndCallFeatureAndJs() {
// a JS function (headers.js) re-evaluated after a feature call picks up state
// created by that call: the first result lacks 'baz', the second includes it.
// NOTE(review): presumably headers.js consults the 'ticket' variable via
// karate.get — confirm against headers.js / signin.feature
ScenarioContext ctx = getContext();
Script.assign("fun", "read('headers.js')", ctx);
Script.assign("res", "call fun", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ foo: 'bar_someValue' }", ctx).pass);
Script.assign("signin", "call read('signin.feature')", ctx);
Script.assign("ticket", "signin.ticket", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "ticket", null, "{ foo: 'bar' }", ctx).pass);
Script.assign("res", "call fun", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ foo: 'bar_someValue', baz: 'ban' }", ctx).pass);
}
@Test
public void testFromJsKarateJsonPath() {
    // karate.jsonPath() works inside JS functions and inline in match expressions,
    // both on literals and on existing variables
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ var foo = [{v:1},{v:2}]; return karate.jsonPath(foo, '$[*].v') }", context);
    Script.assign("res", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2]", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "karate.jsonPath([{v:1},{v:2}], '$[*].v')", context).pass);
    Script.assign("foo", "[{v:1},{v:2}]", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "karate.jsonPath(foo, '$[*].v')", context).pass);
}
@Test
public void testAssigningRawTextWhichOtherwiseConfusesKarate() {
// a brace-wrapped non-json string fails normal assignment with InvalidJsonException,
// but can be assigned verbatim via AssignType.TEXT
ScenarioContext ctx = getContext();
try {
Script.assign("foo", "{ not json }", ctx);
fail("we expected this to fail");
} catch (InvalidJsonException e) {
// expected path: log and continue
logger.debug("expected {}", e.getMessage());
}
Script.assign(AssignType.TEXT, "foo", "{ not json }", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'{ not json }'", ctx).pass);
}
@Test
public void testBigDecimalsInJson() {
// high-precision decimals compare by numeric value, not textual representation:
// -1002.20 equals -1002.2000000000000 but differs from -1002.2000000000001
ScenarioContext ctx = getContext();
Script.assign("foo", "{ val: -1002.2000000000002 }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000002 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000002 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
// low-precision actual against high-precision expected
Script.assign("foo", "{ val: -1002.20 }", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000000 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000000 }", ctx).pass);
// high-precision actual against low-precision expected
Script.assign("foo", "{ val: -1002.2000000000001 }", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
// trailing zeros are numerically insignificant
Script.assign("foo", "{ val: -1002.2000000000000 }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
}
@Test
public void testDollarInEmbeddedExpressions() {
    // '$' inside an embedded expression refers to the root of the actual document
    ScenarioContext context = getContext();
    Script.assign("temperature", "{ celsius: 100, fahrenheit: 212 }", context);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "temperature", null, "{ fahrenheit: 212 }", context).pass);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "temperature", null, "{ fahrenheit: '#($.celsius * 1.8 + 32)' }", context).pass);
}
@Test
public void testOptionalAndUnMatchedActualKeys() {
    // an optional key ('##null') does not excuse an unexpected key in the actual
    ScenarioContext context = getContext();
    Script.assign("expected", "{ a: 1, b: 2, c: '##null' }", context);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "expected", null, "{ a: 1, b: 2, d: 3}", context).pass);
}
@Test
public void testValidationStringInsteadOfNumberInPredicate() {
    ScenarioContext context = getContext();
    // actual is a real number: type and predicate macros all pass
    Script.assign("foo", "{ bar: 5 }", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ == 5' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ > 0' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number? _ > 0' }", context).pass);
    // actual is the string '5': loose '==' still passes, but '#number' and strict '===' do not
    Script.assign("foo", "{ bar: '5' }", context);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ == 5' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ === 5' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ > 0' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number? _ > 0' }", context).pass);
}
@Test
public void testMatchMacroArray() {
// the '#[len]' array validator macro: length checks, an element schema given by
// variable name or a parenthesized expression, and a trailing '?' predicate
ScenarioContext ctx = getContext();
Script.assign("foo", "['bar', 'baz']", ctx);
Script.assign("arr", "'#string'", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#array'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#number'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[]'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1]'", ctx).pass);
// inside '#[...]' the '_' placeholder is the array length
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[_ == 2]'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[_ != 2]'", ctx).pass);
// per-element schema after the length part: variable, parenthesized, or inline macro
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] arr'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (arr)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] #string'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] #number'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2] #string'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1] arr'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1] (arr)'", ctx).pass);
// trailing '? expr' predicate applies to each element
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]? _.length == 3'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]? _.length == 4'", ctx).pass);
// non-root path; the length inside '#[...]' can also come from another variable
Script.assign("foo", "{ ban: ['bar', 'baz'], count: 2 }", ctx);
Script.assign("len", "2", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[] arr'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[] (arr)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[2] arr'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[2] (arr)'", ctx).pass);
// NOTE(review): disabled assertion — '$.count' self-reference inside '#[...]';
// confirm whether that form is meant to be supported before deleting
// assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[$.count] #string'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[foo.count] #string'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[len] #string'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[_ < 3]'", ctx).pass);
}
@Test
public void testMatchMacroArrayComplex() {
// schema markers inside '#(...)' and after '#[]': plain name = equals,
// '^' = contains, '^^' = contains only, '^*' = contains any, '!^' = not contains;
// both bare and parenthesized forms are accepted
ScenarioContext ctx = getContext();
Script.assign("foo", "[{ a: 1, b: 2 }, { a: 3, b: 4 }]", ctx);
Script.assign("bar", "{ a: '#number', b: '#number' }", ctx);
Script.assign("baz", "{ c: '#number' }", ctx);
Script.assign("ban", "{ b: '#number' }", ctx);
assertTrue(Script.matchNamed(MatchType.EACH_EQUALS, "foo", null, "bar", ctx).pass);
// markers applied to a single element via '#(...)'
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^ban)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*ban)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^^bar)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^^ban)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^bar)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^ban)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^baz)'", ctx).pass);
// markers applied per-element via '#[] marker'
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^*bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^^bar'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^^ban'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^*ban'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^ban'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^bar'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^ban'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^baz'", ctx).pass);
// same, parenthesized
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^*bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^ban)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^*ban)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^^bar)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^^ban)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^baz)'", ctx).pass);
}
// Exercises the contains-family macro markers against arrays of complex (map)
// values: '^' contains, '^^' contains only, '^*' contains any, '!^' not contains,
// via '#(...)', '#[] ...' and '#[] (...)' forms, plus self/reversed/partial/disjoint arrays.
@Test
public void testMatchMacroArrayComplexContains() {
ScenarioContext ctx = getContext();
Script.assign("foo", "[{ a: 1, b: 2 }, { a: 3, b: 4 }]", ctx);
Script.assign("rev", "[{ a: 3, b: 4 }, { a: 1, b: 2 }]", ctx);
Script.assign("part", "[{ a: 1, b: 2 }]", ctx);
Script.assign("one", "{ a: 1, b: 2 }", ctx);
Script.assign("nopes", "[{ a: 6, b: 7 }, { a: 8, b: 9 }]", ctx);
Script.assign("nope", "{ a: 8, b: 9 }", ctx);
Script.assign("bar", "{ b: '#number' }", ctx);
Script.assign("baz", "{ c: '#number' }", ctx);
// each element of foo has more keys than bar: equals fails, contains passes
assertFalse(Script.matchNamed(MatchType.EACH_EQUALS, "foo", null, "bar", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*bar)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^baz)'", ctx).pass);
// '#[]' applies the marker to every element of the array
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^bar'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^bar'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^baz'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^bar)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^bar)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^baz)'", ctx).pass);
// array matched against itself, a reversed copy, a partial copy and a single element
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(foo)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^foo)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^*foo)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^foo)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^rev)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^rev)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(rev)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^part)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^part)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^part)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^one)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^one)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^one)'", ctx).pass);
// fully disjoint array / element should only satisfy '!^'
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(nopes)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^nopes)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^nopes)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^nopes)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(nope)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^nope)'", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^nope)'", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^nope)'", ctx).pass);
}
// Schema-style matching: '#[] marker' validates every array element, and the
// double-hash '##' prefix makes a key optional - it passes when the key is
// missing, null, or present and matching.
@Test
public void testSchemaLikeAndOptionalKeys() {
ScenarioContext ctx = getContext();
Script.assign("child", "{ hello: '#string' }", ctx);
Script.assign("json", "{ foo: 'bar', baz: [1, 2, 3]}", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number' }", ctx).pass);
// '##(child)' is optional, so a missing 'child' key still passes; '#(child)' does not
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '##(child)' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '#(child)' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number' }", ctx).pass);
Script.assign("json", "{ foo: 'bar', child: { hello: 'world' } }", ctx);
// now 'baz' is missing instead, so only the optional '##[]' form passes
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '#(child)' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number', child: '#(child)' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number', child: '##(child)' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', blah: '##number', child: '#(child)' }", ctx).pass);
Script.assign("json", "{ foo: 'bar', baz: null }", ctx);
// an optional marker also accepts an explicit null value
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##string' }", ctx).pass);
Script.assign("json", "null", ctx);
// and a null top-level value
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "'##string'", ctx).pass);
}
// Distinguishes the three states a key can be in - absent, present-with-null
// and present-with-value - against the '#present', '#notpresent', '#null' and
// optional '##null' markers, for EQUALS / CONTAINS and their negations.
@Test
public void testPresentNotPresentAndOptionalNulls() {
ScenarioContext ctx = getContext();
// key absent
Script.assign("json", "{ }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: null }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#present' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#present' }", ctx).pass);
// key present with a non-null value
Script.assign("json", "{ a: 1 }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#present' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#present' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: null }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
// key present with an explicit null value
Script.assign("json", "{ a: null }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: null }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: null }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#null' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '##null' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
}
/**
 * A json-path match applied to a 'response' that is just an empty string
 * should fail cleanly rather than throw.
 */
@Test
public void testJsonPathWhenActualIsEmptyString() {
    ScenarioContext context = getContext();
    Script.assign("response", "''", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "$.foo", null, "'#notnull'", context);
    assertFalse(result.pass);
}
/**
 * Placeholder replacement: a token is only substituted when wrapped in the
 * default '&lt;...&gt;' markers (or matched verbatim for custom tokens such as
 * '@@foo@@'), and every occurrence in the text is replaced.
 */
@Test
public void testReplace() {
    ScenarioContext ctx = getContext();
    // bare token without markers is left untouched
    assertEquals("foo", Script.replacePlaceholderText("foo", "foo", "'bar'", ctx));
    // note: this assertion was previously duplicated verbatim; the copy was removed
    assertEquals("bar", Script.replacePlaceholderText("<foo>", "foo", "'bar'", ctx));
    // custom token including its own delimiters
    assertEquals("bar", Script.replacePlaceholderText("@@foo@@", "@@foo@@", "'bar'", ctx));
    // all occurrences are replaced
    assertEquals("bar bar bar", Script.replacePlaceholderText("<foo> <foo> <foo>", "foo", "'bar'", ctx));
}
// The 'karate.eval(...)' bridge should evaluate a string expression against the
// current variables, returning values usable in assignments and match RHS.
@Test
public void testEvalFromJs() {
ScenarioContext ctx = getContext();
Script.assign("temperature", "{ celsius: 100, fahrenheit: 212 }", ctx);
Script.assign("res", "karate.eval('temperature.celsius')", ctx);
Script.assign("bool", "karate.eval('temperature.celsius == 100')", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "100", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bool", null, "true", ctx).pass);
// eval result can itself be the expected side of a match
assertTrue(Script.matchNamed(MatchType.EQUALS, "temperature.fahrenheit", null, "karate.eval('temperature.celsius * 1.8 + 32')", ctx).pass);
}
/**
 * Multiple optional ('##') embedded expressions that evaluate to null must
 * all be removed from the resulting json, not just the first.
 */
@Test
public void testRemoveIfNullMultiple() {
    ScenarioContext context = getContext();
    Script.assign("foo", "{ first: 'bar', second: '##(null)', third: '##(null)' }", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "foo", null, "{ first: 'bar' }", context);
    assertTrue(result.pass);
}
/**
 * 'contains' matching must not coerce between data types: the string '5' does
 * not match the number 5, and the string 'true' does not match the boolean
 * true - in either direction.
 */
@Test
public void testMatchingIsStrictForDataTypes() {
    ScenarioContext context = getContext();
    Script.assign("foo", "{ a: '5', b: 5, c: true, d: 'true' }", context);
    assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ a: 5 }", context).pass);
    assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ b: '5' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ c: 'true' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ d: true }", context).pass);
}
/**
 * Assignment with an explicit JSON type conversion should produce values
 * that match empty-list and empty-map literals.
 */
@Test
public void testTypeConversion() {
    ScenarioContext context = getContext();
    // empty array
    Script.assign(AssignType.JSON, "foo", "[]", context, true);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "[]", context).pass);
    // empty object
    Script.assign(AssignType.JSON, "foo", "{}", context, true);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{}", context).pass);
}
// Round-trips a binary file: a byte-array assignment should match the same
// bytes read again. NOTE(review): depends on the file
// src/main/resources/res/karate-logo.png existing relative to the working dir.
@Test
public void testBinaryMatching() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.BYTE_ARRAY, "data", "read('file:src/main/resources/res/karate-logo.png')", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "data", null, "read('file:src/main/resources/res/karate-logo.png')", ctx).pass);
}
// Builds a map that contains itself, then verifies that
// JsonUtils.removeCyclicReferences() breaks the cycle so the value can be
// serialized - the cyclic child ends up represented by its class name.
@Test
public void testJsonCyclicReferences() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ var env = 'dev'; var config = { env: env }; return config }", ctx);
Script.assign("json", "fun()", ctx);
Map value = (Map) ctx.vars.get("json").getValue();
// introduce the cycle: the map now references itself under 'child'
value.put("child", value);
value = JsonUtils.removeCyclicReferences(value);
DocumentContext doc = JsonUtils.toJsonDoc(value);
Map temp = doc.read("$");
Match.equals(temp, "{ env: 'dev', child: '#java.util.LinkedHashMap' }");
}
// The left-hand side of a match can be a js function call, optionally with a
// trailing path ('fun().a') or wrapped in parentheses.
@Test
public void testMatchFunctionOnLhs() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ return true }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun()", null, "true", ctx).pass);
Script.assign("fun", "function(){ return { a: 1 } }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun()", null, "{ a: 1 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun().a", null, "1", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "(fun().a)", null, "1", ctx).pass);
}
// karate.toJson(pojo) converts a Java bean to json including null-valued
// properties; the two-arg form with 'true' omits them - shown by 'bar'
// lacking the null 'foo' key below.
@Test
public void testKarateToJson() {
ScenarioContext ctx = getContext();
Script.assign("SP", "Java.type('com.intuit.karate.SimplePojo')", ctx);
Script.assign("sp", "new SP()", ctx);
Script.evalJsExpression("sp.bar = 10", ctx);
Script.assign("foo", "karate.toJson(sp)", ctx);
Script.assign("bar", "karate.toJson(sp, true)", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ foo: null, bar: 10 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar", null, "{ bar: 10 }", ctx).pass);
}
/**
 * Bean properties of a Java object on the left-hand side of a match should
 * be directly accessible, including properties that are null.
 */
@Test
public void testMatchJavaBeanPropertyOhLhs() {
    ScenarioContext ctx = getContext();
    Script.assign("SP", "Java.type('com.intuit.karate.SimplePojo')", ctx);
    Script.assign("sp", "new SP()", ctx);
    Script.evalJsExpression("sp.bar = 10", ctx);
    // note: the unused 'foo'/'bar' karate.toJson() assignments that had been
    // copy-pasted from testKarateToJson() were removed - nothing here asserted
    // against them; this test only checks the pojo's bean properties
    assertTrue(Script.matchNamed(MatchType.EQUALS, "sp.foo", null, "null", ctx).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "sp.bar", null, "10", ctx).pass);
}
/**
 * Runs the notEqualMatch feature and confirms the two result variables it
 * sets end up with different values.
 */
@Test
public void notEqualMatchTest() {
    Map<String, Object> result = Runner.runFeature(getClass(), "core/notEqualMatch.feature", null, true);
    assertNotEquals(result.get("a"), result.get("b"));
}
}
|
karate-core/src/test/java/com/intuit/karate/ScriptTest.java
|
package com.intuit.karate;
import com.intuit.karate.core.MatchType;
import com.intuit.karate.core.FeatureContext;
import com.intuit.karate.core.ScenarioContext;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.InvalidJsonException;
import com.jayway.jsonpath.JsonPath;
import java.io.ByteArrayInputStream;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.*;
import org.w3c.dom.Document;
/**
*
* @author pthomas3
*/
public class ScriptTest {
// class-scoped logger used by tests below to eyeball intermediate results
private static final Logger logger = LoggerFactory.getLogger(ScriptTest.class);
/**
 * Builds a fresh ScenarioContext rooted at the directory containing this
 * class, using the 'dev' environment - each test thus gets isolated state.
 */
private ScenarioContext getContext() {
Path featureDir = FileUtils.getPathContaining(getClass());
FeatureContext featureContext = FeatureContext.forWorkingDir("dev", featureDir.toFile());
CallContext callContext = new CallContext(null, true);
return new ScenarioContext(featureContext, callContext, null, null);
}
// convenience wrapper: deep-equals match of two already-parsed objects at root '$'
private AssertionResult matchJsonObject(Object act, Object exp, ScenarioContext context) {
return Script.matchNestedObject('.', "$", MatchType.EQUALS, null, null, act, exp, context);
}
// Sanity-checks the expression classifiers that decide whether a string is a
// plain variable, an xml-path, an xml-path function, or a 'variable + path' pair.
@Test
public void testParsingTextType() {
assertTrue(Script.isVariable("foo"));
assertTrue(Script.isXmlPath("/foo"));
assertTrue(Script.isXmlPath("//foo"));
assertTrue(Script.isXmlPathFunction("lower-case('Foo')"));
assertTrue(Script.isXmlPathFunction("count(/journal/article)"));
assertTrue(Script.isVariableAndSpaceAndPath("foo count(/journal/article)"));
assertTrue(Script.isVariableAndSpaceAndPath("foo $"));
}
// Evaluates js expressions over primitive variables: string concatenation
// yields a STRING, and numeric addition yields a PRIMITIVE - note the js
// engine produces a double (3.0), not an int.
@Test
public void testEvalPrimitives() {
ScenarioContext ctx = getContext();
ctx.vars.put("foo", "bar");
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
String expression = "foo + 'baz'";
ScriptValue value = Script.evalJsExpression(expression, ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("barbaz", value.getValue());
value = Script.evalJsExpression("a + b", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
/**
 * The string "3" must not match the number 3 - variable-to-variable matching
 * is strict about data types.
 */
@Test
public void testMatchPrimitiveStrings() {
    ScenarioContext context = getContext();
    context.vars.put("a", "3");
    context.vars.put("b", 3);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "a", null, "b", context);
    assertFalse(result.pass);
}
// Evaluates js expressions that navigate into a Java Map containing a nested
// List - dotted and indexed access both work, with js numeric results being
// doubles (3.0).
@Test
public void testEvalMapsAndLists() {
ScenarioContext ctx = getContext();
Map<String, Object> testMap = new HashMap<>();
testMap.put("foo", "bar");
testMap.put("baz", 5);
List<Integer> testList = new ArrayList<>();
testList.add(1);
testList.add(2);
testMap.put("myList", testList);
ctx.vars.put("myMap", testMap);
// string + number concatenates as a string
String expression = "myMap.foo + myMap.baz";
ScriptValue value = Script.evalJsExpression(expression, ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("bar5", value.getValue());
value = Script.evalJsExpression("myMap.myList[0] + myMap.myList[1]", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
// Dotted and indexed js expressions should navigate into a json DocumentContext
// variable, reaching strings, array elements and nested objects.
@Test
public void testEvalJsonDocuments() {
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar', baz: [1, 2], ban: { hello: 'world' } }");
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsExpression("myJson.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalJsExpression("myJson.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalJsExpression("myJson.ban.hello", ctx);
assertEquals("world", value.getValue());
}
/**
 * A dotted-path js expression should navigate into an XML Document variable
 * and return the element's text.
 */
@Test
public void testEvalXmlDocuments() {
    ScenarioContext context = getContext();
    Document doc = XmlUtils.toXmlDoc("<root><foo>bar</foo><hello>world</hello></root>");
    context.vars.put("myXml", doc);
    ScriptValue result = Script.evalJsExpression("myXml.root.foo", context);
    assertEquals("bar", result.getValue());
}
// XML assigned with literal line breaks should still parse; selecting a
// sub-tree yields a MAP, repeated child elements collapse to a list, and the
// list length is visible to js assertions.
@Test
public void testAssignXmlWithLineBreaksAndMatchJson() {
ScenarioContext ctx = getContext();
Script.assign("foo", "<records>\n  <record>a</record>\n  <record>b</record>\n  <record>c</record>\n</records>", ctx);
Script.assign("bar", "foo.records", ctx);
ScriptValue value = ctx.vars.get("bar");
assertTrue(value.getType() == ScriptValue.Type.MAP);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar.record", null, "['a', 'b', 'c']", ctx).pass);
assertTrue(Script.assertBoolean("foo.records.record.length == 3", ctx).pass);
}
// A self-closing (empty) XML element should surface as null when the parent
// is converted to a map / list.
@Test
public void testAssignXmlWithLineBreaksAndNullElements() {
ScenarioContext ctx = getContext();
Script.assign("foo", "<records>\n  <record>a</record>\n  <record/>\n</records>", ctx);
Script.assign("bar", "foo.records", ctx);
ScriptValue value = ctx.vars.get("bar");
assertTrue(value.getType() == ScriptValue.Type.MAP);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar.record", null, "['a', null]", ctx).pass);
}
// Applying a json-path to a named variable - via evalJsonPathOnVarByName and
// via the equivalent karate expression syntax - should agree, and should
// return LIST / MAP script values for array / object sub-trees.
@Test
public void testJsonPathOnVarsByName() {
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar', baz: [1, 2], ban: { hello: 'world' } }");
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsonPathOnVarByName("myJson", "$.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalKarateExpression("myJson.foo", ctx);
assertEquals("bar", value.getValue());
value = Script.evalJsonPathOnVarByName("myJson", "$.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalKarateExpression("myJson.baz[1]", ctx);
assertEquals(2, value.getValue());
value = Script.evalJsonPathOnVarByName("myJson", "$.baz", ctx);
assertEquals(ScriptValue.Type.LIST, value.getType());
value = Script.evalJsonPathOnVarByName("myJson", "$.ban", ctx);
assertEquals(ScriptValue.Type.MAP, value.getType());
}
// Applying an xml-path to a named variable should work both via
// evalXmlPathOnVarByName and via the '$varName/path' shorthand expression.
@Test
public void testXmlPathOnVarsByName() {
ScenarioContext ctx = getContext();
Document doc = XmlUtils.toXmlDoc("<root><foo>bar</foo></root>");
ctx.vars.put("myXml", doc);
ScriptValue value = Script.evalXmlPathOnVarByName("myXml", "/root/foo", ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("bar", value.getAsString());
value = Script.evalKarateExpression("$myXml/root/foo", ctx);
assertEquals("bar", value.getAsString());
}
// An embedded '#(...)' expression inside XML text should be evaluated in
// place - note the js arithmetic result stringifies as "3.0" (double).
@Test
public void testEvalXmlEmbeddedExpressions() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
Document doc = XmlUtils.toXmlDoc("<root><foo>#(a + b)</foo></root>");
Script.evalXmlEmbeddedExpressions(doc, ctx);
ctx.vars.put("myXml", doc);
ScriptValue value = Script.evalXmlPathOnVarByName("myXml", "/root/foo", ctx);
assertEquals(ScriptValue.Type.STRING, value.getType());
assertEquals("3.0", value.getAsString());
}
/**
 * An embedded expression that evaluates to an XML chunk should be grafted
 * into the enclosing document as child nodes, not inserted as text.
 */
@Test
public void testEvalXmlEmbeddedExpressionsThatReturnChunks() {
    ScenarioContext context = getContext();
    Script.assign("hello", "<hello>world</hello>", context);
    Script.assign("xml", "<foo><bar>#(hello)</bar></foo>", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar><hello>world</hello></bar></foo>", context);
    assertTrue(result.pass);
}
// A '#(...)' expression that evaluates to null should leave an empty element,
// which matches both the '<bar></bar>' and self-closing '<bar/>' forms.
@Test
public void testEvalXmlEmbeddedExpressionsThatReturnNull() {
ScenarioContext ctx = getContext();
Script.assign("hello", "null", ctx);
Script.assign("xml", "<foo><bar>#(hello)</bar></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar></bar></foo>", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar/></foo>", ctx).pass);
}
// '#(...)' expressions are evaluated in XML attribute values as well as in
// element text - the integer 5 is rendered as the string "5" in both places.
@Test
public void testEvalXmlEmbeddedExpressionsInAttributes() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 5);
String xml = "<foo bar=\"#(a)\">#(a)</foo>";
Document doc = XmlUtils.toXmlDoc(xml);
Script.evalXmlEmbeddedExpressions(doc, ctx);
String result = XmlUtils.toString(doc);
logger.debug("result: {}", result);
assertTrue(result.endsWith("<foo bar=\"5\">5</foo>"));
}
/**
 * An optional ('##') embedded expression in an XML attribute that evaluates
 * to null should remove the attribute entirely.
 */
@Test
public void testEvalXmlEmbeddedOptionalExpressionsInAttributes() {
    ScenarioContext context = getContext();
    Script.assign("a", "null", context);
    Script.assign("xml", "<foo bar=\"##(a)\">baz</foo>", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo>baz</foo>", context);
    assertTrue(result.pass);
}
// An optional ('##') embedded expression in element text that evaluates to
// null should remove the whole element from the document.
@Test
public void testEvalXmlEmbeddedOptionalExpressions() {
ScenarioContext ctx = getContext();
Script.assign("a", "null", ctx);
Script.assign("xml", "<foo><a>hello</a><b>##(a)</b></foo>", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><a>hello</a></foo>", ctx).pass);
}
// '#(...)' expressions embedded in json string values should be evaluated in
// place - here the numeric result is a PRIMITIVE double (3.0).
@Test
public void testEvalJsonEmbeddedExpressions() {
ScenarioContext ctx = getContext();
ctx.vars.put("a", 1);
ctx.vars.put("b", 2);
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: '#(a + b)' }");
Script.evalJsonEmbeddedExpressions(doc, ctx);
ctx.vars.put("myJson", doc);
ScriptValue value = Script.evalJsonPathOnVarByName("myJson", "$.foo", ctx);
assertEquals(ScriptValue.Type.PRIMITIVE, value.getType());
assertEquals(3.0, value.getValue());
}
// An embedded expression may itself navigate another json variable
// ('ticket.userId'); the resolved string value is inlined in the result.
@Test
public void testEvalEmbeddedExpressionsWithJsonPath() {
ScenarioContext ctx = getContext();
String ticket = "{ ticket: 'my-ticket', userId: '12345' }";
ctx.vars.put("ticket", JsonUtils.toJsonDoc(ticket));
String json = "{ foo: '#(ticket.userId)' }";
DocumentContext doc = JsonUtils.toJsonDoc(json);
Script.evalJsonEmbeddedExpressions(doc, ctx);
String result = doc.jsonString();
logger.debug("result: {}", result);
assertEquals("{\"foo\":\"12345\"}", result);
}
// Embedded expressions must still evaluate when the enclosing json keys
// contain characters that are awkward for paths: spaces, hyphens and dots.
@Test
public void testEvalEmbeddedExpressionsWithJsonPathsWhichAreTricky() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ a: 1, b: 2, c: 3 }", ctx);
Script.assign("bar", "{ 'sp ace': '#(foo.a)', 'hy-phen': '#(foo.b)', 'full.stop': '#(foo.c)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar", null, "{ 'sp ace': 1, 'hy-phen': 2, 'full.stop': 3 }", ctx).pass);
}
/**
 * '#(...)' keeps a null result (key present with a null value) while
 * '##(...)' drops the key altogether.
 */
@Test
public void testEvalEmbeddedOptionalExpressions() {
    ScenarioContext context = getContext();
    Script.assign("foo", "{ a: null, b: null }", context);
    Script.assign("bar", "{ hello: '#(foo.a)', world: '##(foo.b)' }", context);
    AssertionResult result = Script.matchNamed(MatchType.EQUALS, "bar", null, "{ hello: null }", context);
    assertTrue(result.pass);
}
// An embedded expression whose value is an InputStream should be consumed and
// embedded as its string content.
@Test
public void testEvalEmbeddedExpressionStream() {
ScenarioContext ctx = getContext();
ctx.vars.put("inputStream", new ScriptValue(new ByteArrayInputStream("hello world".getBytes())));
Script.assign("doc", "{ foo: '#(inputStream)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "doc", null, "{ foo: 'hello world' }", ctx).pass);
}
/**
 * Valid variable names start with a letter and contain only letters, digits
 * and underscores; dots, hyphens, '$' paths and leading underscores or
 * digits are all rejected.
 */
@Test
public void testVariableNameValidation() {
    String[] valid = { "foo", "foo_bar", "foo_", "foo1", "a", "a1" };
    for (String name : valid) {
        assertTrue(Script.isValidVariableName(name));
    }
    String[] invalid = { "foo.bar", "foo-bar", "$foo", "$foo/bar", "_foo", "_foo_", "0", "2foo" };
    for (String name : invalid) {
        assertFalse(Script.isValidVariableName(name));
    }
}
// Incrementally mutates a pair of maps and re-matches after each change:
// covers '#ignore' / '#notpresent' markers, nested lists, nested maps and
// lists of maps. Order of the steps matters - each assert reflects the
// mutations made so far.
@Test
public void testMatchMapObjects() {
ScenarioContext ctx = getContext();
Map<String, Object> left = new HashMap<>();
left.put("foo", "bar");
Map<String, Object> right = new HashMap<>();
right.put("foo", "bar");
assertTrue(matchJsonObject(left, right, ctx).pass);
// expected-side markers for a key absent on the actual side
right.put("baz", "#ignore");
assertTrue(matchJsonObject(left, right, ctx).pass);
right.put("baz", "#notpresent");
assertTrue(matchJsonObject(left, right, ctx).pass);
// nested lists: equal passes, shorter actual fails
left.put("baz", Arrays.asList(1, 2, 3));
right.put("baz", Arrays.asList(1, 2, 3));
assertTrue(matchJsonObject(left, right, ctx).pass);
left.put("baz", Arrays.asList(1, 2));
assertFalse(matchJsonObject(left, right, ctx).pass);
// nested maps
Map<String, Object> leftChild = new HashMap<>();
leftChild.put("a", 1);
Map<String, Object> rightChild = new HashMap<>();
rightChild.put("a", 1);
left.put("baz", leftChild);
right.put("baz", rightChild);
assertTrue(matchJsonObject(left, right, ctx).pass);
// lists of maps, with a mismatch and then '#ignore' to mask it
List<Map> leftList = new ArrayList<>();
leftList.add(leftChild);
List<Map> rightList = new ArrayList<>();
rightList.add(rightChild);
left.put("baz", leftList);
right.put("baz", rightList);
assertTrue(matchJsonObject(left, right, ctx).pass);
rightChild.put("a", 2);
assertFalse(matchJsonObject(left, right, ctx).pass);
rightChild.put("a", "#ignore");
assertTrue(matchJsonObject(left, right, ctx).pass);
}
/**
 * Deep-equals matching of two lists containing equal maps should pass, even
 * with a null context.
 */
@Test
public void testMatchListObjects() {
    // raw List / ArrayList replaced with proper generics (no behavior change)
    List<Object> left = new ArrayList<>();
    List<Object> right = new ArrayList<>();
    Map<String, Object> leftChild = new HashMap<>();
    leftChild.put("a", 1);
    left.add(leftChild);
    Map<String, Object> rightChild = new HashMap<>();
    rightChild.put("a", 1);
    right.add(rightChild);
    assertTrue(matchJsonObject(left, right, null).pass);
}
// Matching at arbitrary json-paths within a variable, including '#ignore' for
// a single array element. NOTE(review): the expected strings
// "{ ban: [1, 2, 3]} }" contain a stray trailing '}' - apparently tolerated
// by the lenient json parser; confirm before reusing elsewhere.
@Test
public void testMatchJsonPath() {
DocumentContext doc = JsonPath.parse("{ foo: 'bar', baz: { ban: [1, 2, 3]} }");
ScenarioContext ctx = getContext();
ctx.vars.put("myJson", doc);
ScriptValue myJson = ctx.vars.get("myJson");
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.foo", "'bar'", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz", "{ ban: [1, 2, 3]} }", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz.ban[1]", "2", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.baz", "{ ban: [1, '#ignore', 3]} }", ctx).pass);
}
// A json-path that extracts an array should produce a LIST variable that can
// itself be matched at indexed paths.
@Test
public void testMatchJsonPathThatReturnsList() {
DocumentContext doc = JsonPath.parse("{ foo: [{ bar: 1}, {bar: 2}, {bar: 3}]}");
ScenarioContext ctx = getContext();
ctx.vars.put("json", doc);
Script.assign("list", "json.foo", ctx);
ScriptValue list = ctx.vars.get("list");
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, list, "$[0]", "{ bar: 1}", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, list, "$[0].bar", "1", ctx).pass);
}
// A json-path filter expression on the left-hand side of a match should be
// evaluated against the 'response' variable; '#[1]' asserts the filtered
// result is an array of length 1.
@Test
public void testMatchJsonPathOnLeftHandSide() {
ScenarioContext ctx = getContext();
String json = "[\n"
+ "    {\n"
+ "        \"a\": \"a\",\n"
+ "        \"b\": \"a\",\n"
+ "        \"c\": \"a\",\n"
+ "    },\n"
+ "    {\n"
+ "        \"a\": \"ab\",\n"
+ "        \"b\": \"ab\",\n"
+ "        \"c\": \"ab\",\n"
+ "    }\n"
+ "]";
Script.assign("response", json, ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "response[?(@.b=='ab')]", null, "'#[1]'", ctx).pass);
}
/**
 * Covers the full family of collection match types against an array of maps:
 * EQUALS, CONTAINS, CONTAINS_ONLY (order-insensitive), CONTAINS_ANY,
 * CONTAINS_DEEP, NOT_CONTAINS, and the EACH_* per-element variants.
 */
@Test
public void testMatchAllJsonPath() {
    DocumentContext doc = JsonPath.parse("{ foo: [{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]}");
    ScenarioContext ctx = getContext();
    ctx.vars.put("myJson", doc);
    ScriptValue myJson = ctx.vars.get("myJson");
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.NOT_CONTAINS, myJson, "$.foo", "[{bar: 1, baz: 'a'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.NOT_CONTAINS, myJson, "$.foo", "[{bar: 9, baz: 'z'}, {bar: 99, baz: 'zz'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ANY, myJson, "$.foo", "[{bar: 9, baz: 'z'}, {bar: 2, baz: 'b'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 2}, {bar:3}]", ctx).pass);
    // shuffle: CONTAINS_ONLY is order-insensitive but must cover every element
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 2, baz: 'b'}, {bar:3, baz: 'c'}, {bar: 1, baz: 'a'}]", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_ONLY, myJson, "$.foo", "[{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}]", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_EQUALS, myJson, "$.foo", "{bar:'#number', baz:'#string'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_CONTAINS, myJson, "$.foo", "{bar:'#number'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_CONTAINS, myJson, "$.foo", "{baz:'#string'}", ctx).pass);
    // note: an exact duplicate of the CONTAINS_DEEP assertion above was removed here
    assertTrue(Script.matchJsonOrObject(MatchType.EACH_NOT_CONTAINS, myJson, "$.foo", "{baz:'z'}", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.EACH_NOT_CONTAINS, myJson, "$.foo", "{baz:'a'}", ctx).pass);
    assertFalse(Script.matchJsonOrObject(MatchType.EACH_EQUALS, myJson, "$.foo", "{bar:'#? _ < 3', baz:'#string'}", ctx).pass);
    assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{foo: [{bar: 1}, {bar: 2}, {bar:3}]}", ctx).pass);
}
// CONTAINS_DEEP applies contains-semantics recursively: expected maps may
// omit keys at any nesting level, and expected arrays may be a subset of the
// actual - but any key that IS given must match, and extra expected keys fail.
@Test
public void testMatchContainsDeep() {
DocumentContext doc = JsonPath.parse("{ foo: [{bar: 1, baz: 'a'}, {bar: 2, baz: 'b'}, {bar:3, baz: 'c'}], eoo: { doo: { car: 1, caz: 'a'} } }");
ScenarioContext ctx = getContext();
ctx.vars.put("myJson", doc);
ScriptValue myJson = ctx.vars.get("myJson");
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 2}, {bar:3}]", ctx).pass);
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1}, {bar: 4}, {bar:3}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1, baz: 'a'}]", ctx).pass);
// an expected key the actual lacks ('baq') fails even in deep-contains mode
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{bar: 1, baz: 'a', 'baq': 'b'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{baz: 'a'}, {bar: 2}, {bar:3, baz: 'c'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$.foo", "[{baz: 'a'}, {bar:3, baz: 'c'}]", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: '#ignore'}", ctx).pass);
assertTrue(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: { doo: {car: 1} } }", ctx).pass);
assertFalse(Script.matchJsonOrObject(MatchType.CONTAINS_DEEP, myJson, "$", "{eoo: { doo: {car: 'a'} } }", ctx).pass);
}
@Test
public void testMatchNotEquals() {
    // NOT_EQUALS should be the exact inverse of EQUALS, including for
    // fuzzy-match markers, null handling and nested json / arrays
    ScenarioContext context = getContext();
    // array-length and predicate markers
    Script.assign("temp", "[1, 2]", context);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[1]'", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[2]'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[]? _ > 2'", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#[]? _ > 0'", context).pass);
    // regex and expression markers against a plain string
    Script.assign("temp", "'foo'", context);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#regex .{2}'", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#regex .{3}'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#? _.length == 2'", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "temp", null, "'#? _.length == 3'", context).pass);
    // null on either side
    Script.assign("json", "null", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "null", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "1", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "null", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", context).pass);
    Script.assign("json", "1", context);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "null", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "null", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "1", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", context).pass);
    // json objects, embedded expressions and type markers
    Script.assign("nope", "{ foo: '#number' }", context);
    Script.assign("json", "{ foo: 'bar' }", context);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#(^nope)'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#(nope)'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'#array'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "'foo'", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[]", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "1", context).pass);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'bar' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{}", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'blah' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ foo: 'bar', baz: 'ban' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ foo: 'blah' }", context).pass);
    // json arrays
    Script.assign("json", "[{ foo: 'bar'}, { foo: 'baz' }]", context);
    assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[{ foo: 'bar'}, { foo: 'baz' }]", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "[{ foo: 'bar'}, { foo: 'blah' }]", context).pass);
    assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ foo: 'blah' }", context).pass);
}
@Test
public void testMatchJsonObjectReturnedFromJs() {
    // a json object produced by a js function should match a json literal,
    // whether the function result is pre-assigned or called inline
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return { foo: 'bar' } }", context);
    Script.assign("json", "{ foo: 'bar' }", context);
    Script.assign("expected", "fun()", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "expected", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "fun()", context).pass);
}
@Test
public void testMatchJsonArrayReturnedFromJs() {
    // a json array produced by a js function should match an array literal,
    // both via an intermediate variable and via an inline call
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return [ 'foo', 'bar', 'baz' ] }", context);
    Script.assign("json", "[ 'foo', 'bar', 'baz' ]", context);
    Script.assign("expected", "fun()", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "expected", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "fun()", context).pass);
}
@Test
public void testMatchJsonPathOnResponse() {
    // a bare '$' expression should implicitly target the 'response' variable
    ScenarioContext context = getContext();
    DocumentContext responseDoc = JsonPath.parse("{ foo: 'bar' }");
    context.vars.put("response", responseDoc);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "$", null, "{ foo: 'bar' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "$.foo", null, "'bar'", context).pass);
}
// Canned service-response payload and its match-expression counterpart, used
// by testMatchTwoJsonDocsWithIgnores. The '#ignore' markers in EXPECTED skip
// the volatile fields (generated ids and timestamps). Declared static final:
// these are immutable constants (UPPER_SNAKE_CASE naming already signals
// that) and need not be duplicated per test instance.
private static final String ACTUAL = "{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"bef90f66-bb57-4fea-83aa-a0acc42b0426\"},\"primaryId\":\"bef90f66-bb57-4fea-83aa-a0acc42b0426\",\"created\":{\"on\":\"2016-02-28T05:56:48.485+0000\"},\"lastUpdated\":{\"on\":\"2016-02-28T05:56:49.038+0000\"},\"organization\":{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"631fafe9-8822-4c82-b4a4-8735b202c16c\"},\"created\":{\"on\":\"2016-02-28T05:56:48.486+0000\"},\"lastUpdated\":{\"on\":\"2016-02-28T05:56:49.038+0000\"}},\"clientState\":\"ACTIVE\"}";
private static final String EXPECTED = "{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"#ignore\"},\"primaryId\":\"#ignore\",\"created\":{\"on\":\"#ignore\"},\"lastUpdated\":{\"on\":\"#ignore\"},\"organization\":{\"id\":{\"domain\":\"ACS\",\"type\":\"entityId\",\"value\":\"#ignore\"},\"created\":{\"on\":\"#ignore\"},\"lastUpdated\":{\"on\":\"#ignore\"}},\"clientState\":\"ACTIVE\"}";
@Test
public void testMatchTwoJsonDocsWithIgnores() {
    // two parsed documents should match when the expected side uses
    // '#ignore' markers for the volatile fields
    ScenarioContext context = getContext();
    DocumentContext actualDoc = JsonPath.parse(ACTUAL);
    DocumentContext expectedDoc = JsonPath.parse(EXPECTED);
    context.vars.put("actual", actualDoc);
    context.vars.put("expected", expectedDoc);
    ScriptValue actualValue = context.vars.get("actual");
    assertTrue(Script.matchJsonOrObject(MatchType.EQUALS, actualValue, "$", "expected", context).pass);
}
@Test
public void testMatchXmlPathThatReturnsTextNode() {
    // xpath expressions resolving to text nodes should match plain strings
    ScenarioContext context = getContext();
    Document xmlDoc = XmlUtils.toXmlDoc("<root><foo>bar</foo><hello>world</hello></root>");
    context.vars.put("myXml", xmlDoc);
    ScriptValue xmlValue = context.vars.get("myXml");
    assertTrue(Script.matchXml(MatchType.EQUALS, xmlValue, "/root/foo", "'bar'", context).pass);
    assertTrue(Script.matchXml(MatchType.EQUALS, xmlValue, "/root/hello", "'world'", context).pass);
}
@Test
public void testMatchXmlPathThatReturnsXmlChunk() {
    // an xpath resolving to an element should match an xml snippet
    ScenarioContext context = getContext();
    Document xmlDoc = XmlUtils.toXmlDoc("<root><foo><bar>baz</bar></foo></root>");
    context.vars.put("myXml", xmlDoc);
    ScriptValue xmlValue = context.vars.get("myXml");
    assertTrue(Script.matchXml(MatchType.EQUALS, xmlValue, "/root/foo", "<foo><bar>baz</bar></foo>", context).pass);
}
@Test
public void testMatchXmlPathThatReturnsNull() {
    // an xpath that selects nothing must not match a non-empty chunk
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo>bar</foo></root>", context);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "myXml//baz", null, "<baz>1</baz>", context).pass);
}
@Test
public void testMatchXmlEmptyAndNotPresent() {
    // empty elements (self-closed or open/close pairs) match '',
    // and '#present' / '#notpresent' distinguish missing elements
    ScenarioContext context = getContext();
    Script.assign("xml", "<root><foo>bar</foo><baz/><ban></ban></root>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'bar'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/baz", null, "''", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/ban", null, "''", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'#present'", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "xml/root/foo", null, "'#notpresent'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml/root/nope", null, "'#notpresent'", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "xml/root/nope", null, "'#present'", context).pass);
}
@Test
public void testJsonEmptyAndNotPresent() {
    // presence markers on json keys: '#present' / '#notpresent' test
    // existence, '#ignore' always passes, and the '##' optional forms
    // pass when the key is absent
    ScenarioContext context = getContext();
    Script.assign("json", "{ foo: 'bar' }", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'bar'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#present'", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#notpresent'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#notpresent'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'#ignore'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#ignore'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "'##string'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'##string'", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'##number'", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json.nope", null, "'#present'", context).pass);
}
@Test
public void testAssignAndMatchXmlText() {
    // assigning an xpath text-node result yields a plain string variable
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo>bar</foo></root>", context);
    Script.assign("myStr", "$myXml/root/foo", context);
    assertTrue(Script.assertBoolean("myStr == 'bar'", context).pass);
}
@Test
public void testAssignAndMatchXmlChunk() {
    // an xpath element result can be assigned and matched as an xml chunk
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", context);
    Script.assign("myChunk", "$myXml/root/foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myChunk", null, "<foo><bar>baz</bar></foo>", context).pass);
}
@Test
public void testAssignAndMatchXmlChunkByVariableReference() {
    // an assigned xml chunk can be matched against another variable
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", context);
    Script.assign("myChunk", "$myXml/root/foo", context);
    Script.assign("expected", "<foo><bar>baz</bar></foo>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myChunk", null, "expected", context).pass);
}
@Test
public void testAssignAndMatchXmlPathChunk() {
    // an inline variable/xpath expression can be matched directly
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo", null, "<foo><bar>baz</bar></foo>", context).pass);
}
@Test
public void testAssignAndMatchXmlPathThatReturnsNodeListAgainstJsonArray() {
    // an xpath node-list should be comparable to a json array literal
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>one</bar><bar>two</bar></foo></root>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo/bar", null, "['one', 'two']", context).pass);
}
@Test
public void testAssignAndMatchXmlPathThatReturnsNodeListAgainstList() {
    // an xpath node-list should be comparable to a list extracted
    // from json via a json-path projection
    ScenarioContext context = getContext();
    Script.assign("myJson", "[{ val: 'one' }, { val: 'two' }]", context);
    Script.assign("myList", "get myJson $[*].val", context);
    Script.assign("myXml", "<root><foo><bar>one</bar><bar>two</bar></foo></root>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/root/foo/bar", null, "myList", context).pass);
}
@Test
public void testMatchXmlPathAutoConvertingFromMap() {
    // re-assigning an xml variable produces a map, which should still
    // auto-convert back to xml when an xpath is applied
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", context);
    Script.assign("myMap", "myXml", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myMap/root/foo", null, "<foo><bar>baz</bar></foo>", context).pass);
}
@Test
public void testEvalXmlPathAutoConvertingFromMap() {
    // the 'get' keyword should work identically on the original xml
    // variable and on its map-converted copy
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo><bar>baz</bar></foo></root>", context);
    Script.assign("myMap", "myXml", context);
    Script.assign("temp", "get myXml /root/foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar>baz</bar></foo>", context).pass);
    Script.assign("temp", "get myMap /root/foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar>baz</bar></foo>", context).pass);
}
@Test
public void testAssignXmlPathThatReturnsListThenMatch() {
    // xpath expressions returning node lists should support EQUALS,
    // CONTAINS (order-insensitive) and the 'get' keyword on a map copy
    ScenarioContext context = getContext();
    Script.assign("response", XmlUtilsTest.TEACHERS_XML, context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "//teacher[@department='science']/subject", null, "['math', 'physics']", context).pass);
    Script.assign("subjects", "//teacher[@department='science']/subject", context);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "subjects", null, "['physics', 'math']", context).pass);
    Script.assign("teachers", "response", context); // becomes a map
    Script.assign("subjects", "get teachers //teacher[@department='science']/subject", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "subjects", null, "['math', 'physics']", context).pass);
}
@Test
public void testRunningJsonPathOnStringAutoConvertsStringToJson() {
    // a json-path on a string-typed 'response' should auto-parse it,
    // via both the explicit '$response' form and the bare '$' shortcut
    ScenarioContext context = getContext();
    Script.assign(AssignType.STRING, "response", "{ foo: { hello: 'world' } }", context, true);
    Script.assign("foo", "$response.foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ hello: 'world' }", context).pass);
    Script.assign("foo", "$.foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ hello: 'world' }", context).pass);
}
@Test
public void testCastJsonToString() {
    // casting json to string yields its compact serialized form
    ScenarioContext context = getContext();
    Script.assign("myJson", "{ root: { foo: 'bar' } }", context);
    Script.assign(AssignType.STRING, "myString", "myJson", context, true);
    ScriptValue result = context.vars.get("myString");
    assertEquals(ScriptValue.Type.STRING, result.getType());
    assertEquals("{\"root\":{\"foo\":\"bar\"}}", result.getAsString());
}
@Test
public void testCastStringToJson() {
    // casting a json-shaped string to json parses it into a JSON value
    ScenarioContext context = getContext();
    Script.assign("myString", "{\"root\":{\"foo\":\"bar\"}}", context);
    Script.assign(AssignType.JSON, "myJson", "myString", context, true);
    ScriptValue result = context.vars.get("myJson");
    assertEquals(ScriptValue.Type.JSON, result.getType());
    assertEquals("{\"root\":{\"foo\":\"bar\"}}", result.getAsString());
}
@Test
public void testCastJsonToXml() {
    // casting json to xml maps keys to element names
    ScenarioContext context = getContext();
    Script.assign("myJson", "{ root: { foo: 'bar' } }", context);
    Script.assign(AssignType.XML, "myXml", "myJson", context, true);
    ScriptValue result = context.vars.get("myXml");
    assertEquals(ScriptValue.Type.XML, result.getType());
    assertEquals("<root><foo>bar</foo></root>", result.getAsString());
}
@Test
public void testCastStringToXml() {
    // casting an xml-shaped string to xml parses it into an XML value
    ScenarioContext context = getContext();
    Script.assign(AssignType.STRING, "myString", "<root><foo>bar</foo></root>", context, true);
    Script.assign(AssignType.XML, "myXml", "myString", context, true);
    ScriptValue result = context.vars.get("myXml");
    assertEquals(ScriptValue.Type.XML, result.getType());
    assertEquals("<root><foo>bar</foo></root>", result.getAsString());
}
@Test
public void testCastXmlToString() {
    // casting xml to string serializes the document back to markup
    ScenarioContext context = getContext();
    Script.assign("myXml", "<root><foo>bar</foo></root>", context);
    Script.assign(AssignType.XML_STRING, "myString", "myXml", context, true);
    ScriptValue result = context.vars.get("myString");
    assertEquals(ScriptValue.Type.STRING, result.getType());
    assertEquals("<root><foo>bar</foo></root>", result.getValue());
}
@Test
public void testCastPojoToJson() {
    // a freshly constructed pojo converts to json with default field values
    ScenarioContext context = getContext();
    Script.assign("pojo", "new com.intuit.karate.SimplePojo()", context);
    Script.assign(AssignType.JSON, "json", "pojo", context, true);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: null, bar: 0 }", context).pass);
}
@Test
public void testCastPojoToXml() {
    // a freshly constructed pojo converts to xml with default field values
    ScenarioContext context = getContext();
    Script.assign("pojo", "new com.intuit.karate.SimplePojo()", context);
    Script.assign(AssignType.XML, "xml", "pojo", context, true);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo></foo><bar>0</bar></root>", context).pass);
}
@Test
public void testXmlShortCutsForResponse() {
    // the forms 'response' + '/', 'response/', 'response' and bare '/'
    // should all address the same xml response document
    ScenarioContext context = getContext();
    Script.assign("response", "<root><foo>bar</foo></root>", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "response", "/", "<root><foo>bar</foo></root>", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "response/", null, "<root><foo>bar</foo></root>", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "response", null, "<root><foo>bar</foo></root>", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "/", null, "<root><foo>bar</foo></root>", context).pass);
}
@Test
public void testMatchXmlButUsingJsonPath() {
    // an xml variable should also be addressable with json-path syntax
    // (note xpath indexes are 1-based, json-path indexes 0-based)
    ScenarioContext context = getContext();
    Document xmlDoc = XmlUtils.toXmlDoc("<cat><name>Billie</name><scores><score>2</score><score>5</score></scores></cat>");
    context.vars.put("myXml", xmlDoc);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml/cat/scores/score[2]", null, "'5'", context).pass);
    // using json path for xml !
    assertTrue(Script.matchNamed(MatchType.EQUALS, "myXml.cat.scores.score[1]", null, "'5'", context).pass);
}
@Test
public void testXmlStringConversion() {
    // converting xml to its string form and matching should tolerate
    // attribute ordering differences
    ScenarioContext context = getContext();
    Script.assign("response", "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", context);
    Script.assign(AssignType.XML_STRING, "temp", "response", context, false);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", context).pass);
    // XML DOM parsing unfortunately re-orders attributes
    assertTrue(Script.matchNamed(MatchType.EQUALS, "temp", null, "'<foo><bar aaa=\"1\" bbb=\"2\"/></foo>'", context).pass);
}
@Test
public void testXmlStringConversionInJs() {
    // karate.prettyXml() output should round-trip back to xml and,
    // as a string, reflect DOM attribute re-ordering
    ScenarioContext context = getContext();
    Script.assign(AssignType.AUTO, "response", "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", context, false);
    Script.assign(AssignType.XML, "xml", "karate.prettyXml(response)", context, false);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar bbb=\"2\" aaa=\"1\"/></foo>", context).pass);
    Script.assign(AssignType.AUTO, "temp", "karate.prettyXml(response)", context, false);
    assertTrue(Script.matchNamed(MatchType.CONTAINS, "temp", null, "'<bar aaa=\"1\" bbb=\"2\"/>'", context).pass);
}
@Test
public void testMatchXmlRepeatedElements() {
    // repeated sibling elements should be addressable by 1-based xpath index
    ScenarioContext context = getContext();
    String rawXml = "<foo><bar>baz1</bar><bar>baz2</bar></foo>";
    Document parsedDoc = XmlUtils.toXmlDoc(rawXml);
    context.vars.put(ScriptValueMap.VAR_RESPONSE, parsedDoc);
    ScriptValue responseValue = context.vars.get(ScriptValueMap.VAR_RESPONSE);
    assertTrue(Script.matchXml(MatchType.EQUALS, responseValue, "/", "<foo><bar>baz1</bar><bar>baz2</bar></foo>", context).pass);
    assertTrue(Script.matchXml(MatchType.EQUALS, responseValue, "/foo/bar[2]", "'baz2'", context).pass);
    assertTrue(Script.matchXml(MatchType.EQUALS, responseValue, "/foo/bar[1]", "'baz1'", context).pass);
}
@Test
public void testMatchXmlAttributeErrorReporting() {
    // a failed attribute match should report the attribute's xpath
    ScenarioContext context = getContext();
    Script.assign("xml", "<hello foo=\"bar\">world</hello>", context);
    ScriptValue xmlValue = context.vars.get("xml");
    assertTrue(Script.matchXml(MatchType.EQUALS, xmlValue, "/", "<hello foo=\"bar\">world</hello>", context).pass);
    AssertionResult result = Script.matchXml(MatchType.EQUALS, xmlValue, "/", "<hello foo=\"baz\">world</hello>", context);
    assertFalse(result.pass);
    // the failure message should pinpoint the mismatched attribute
    assertTrue(result.message.contains("/hello/@foo"));
}
@Test
public void testAssigningAndCallingFunctionThatUpdatesVars() {
    // a js function returning a map should merge its entries into vars
    ScenarioContext context = getContext();
    Script.assign("foo", "function(){ return { bar: 'baz' } }", context);
    ScriptValue fooValue = context.vars.get("foo");
    assertEquals(ScriptValue.Type.JS_FUNCTION, fooValue.getType());
    Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", null, context);
    ScriptValue barValue = context.vars.get("bar");
    assertEquals("baz", barValue.getValue());
}
@Test
public void testAssigningAndCallingFunctionThatCanBeUsedToAssignVariable() {
    // 'call' on a no-arg js function assigns its return value
    ScenarioContext context = getContext();
    Script.assign("foo", "function(){ return 'world' }", context);
    Script.assign("hello", "call foo", context);
    ScriptValue helloValue = context.vars.get("hello");
    assertEquals("world", helloValue.getValue());
}
@Test
public void testAssigningAndCallingFunctionWithArgumentsThatCanBeUsedToAssignVariable() {
    // 'call' with an inline argument passes it to the js function
    ScenarioContext context = getContext();
    Script.assign("foo", "function(pre){ return pre + ' world' }", context);
    Script.assign("hello", "call foo 'hello'", context);
    ScriptValue helloValue = context.vars.get("hello");
    assertEquals("hello world", helloValue.getValue());
}
@Test
public void testCallingFunctionThatTakesPrimitiveArgument() {
    // a string argument should reach the js function and the returned
    // map should update vars
    ScenarioContext context = getContext();
    Script.assign("foo", "function(a){ return { bar: a } }", context);
    ScriptValue fooValue = context.vars.get("foo");
    assertEquals(ScriptValue.Type.JS_FUNCTION, fooValue.getType());
    Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", "'hello'", context);
    ScriptValue barValue = context.vars.get("bar");
    assertEquals("hello", barValue.getValue());
}
@Test
public void testCallingFunctionThatTakesJsonArgument() {
    // a json argument should be navigable as an object inside the js function
    ScenarioContext context = getContext();
    Script.assign("foo", "function(a){ return { bar: a.hello } }", context);
    ScriptValue fooValue = context.vars.get("foo");
    assertEquals(ScriptValue.Type.JS_FUNCTION, fooValue.getType());
    Script.callAndUpdateConfigAndAlsoVarsIfMapReturned(false, "foo", "{ hello: 'world' }", context);
    ScriptValue barValue = context.vars.get("bar");
    assertEquals("world", barValue.getValue());
}
@Test
public void testCallingFunctionWithJsonArray() {
    // an inline json array argument should be indexable in the js function
    ScenarioContext context = getContext();
    Script.assign("foo", "function(a){ return a[0] }", context);
    Script.assign("bar", "call foo ['hello']", context);
    ScriptValue barValue = context.vars.get("bar");
    assertEquals("hello", barValue.getValue());
}
@Test
public void testCallingFunctionWithJavaList() {
    // a list passed by variable reference should be indexable like an array
    ScenarioContext context = getContext();
    Script.assign("foo", "function(a){ return a[0] }", context);
    Script.assign("bar", "['hello']", context);
    Script.assign("baz", "call foo bar", context);
    ScriptValue bazValue = context.vars.get("baz");
    assertEquals("hello", bazValue.getValue());
}
@Test
public void testCallingFunctionThatUsesJsonPath() {
    // karate.get() inside a js function should accept both '$'-prefixed
    // json-path expressions and plain variable names
    ScenarioContext context = getContext();
    Script.assign("foo", "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", context);
    Script.assign("fun", "function(){ return karate.get('$foo.bar[*].baz') }", context);
    Script.assign("res", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2, 3]", context).pass);
    // 'normal' variable name
    Script.assign("fun", "function(){ return karate.get('foo') }", context);
    Script.assign("res", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", context).pass);
}
@Test
public void testCallingFunctionWithJsonArrayReturnedFromAnotherFunction() {
    // an array returned by one js function should be usable as the
    // argument of another
    ScenarioContext context = getContext();
    Script.assign("fun1", "function(){ return [1, 2, 3] }", context);
    Script.assign("res1", "call fun1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "[1, 2, 3]", context).pass);
    Script.assign("fun2", "function(arg){ return arg.length }", context);
    Script.assign("res2", "call fun2 res1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "3", context).pass);
}
@Test
public void testCallingFunctionWithJsonReturnedFromAnotherFunction() {
    // a json object returned by one js function should be usable as the
    // argument of another
    ScenarioContext context = getContext();
    Script.assign("fun1", "function(){ return { foo: 'bar' } }", context);
    Script.assign("res1", "call fun1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "{ foo: 'bar' }", context).pass);
    Script.assign("fun2", "function(arg){ return arg.foo }", context);
    Script.assign("res2", "call fun2 res1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "'bar'", context).pass);
}
@Test
public void testCallingFunctionWithStringReturnedFromAnotherFunction() {
    // a string returned by one js function should be usable as the
    // argument of another
    ScenarioContext context = getContext();
    Script.assign("fun1", "function(){ return 'foo' }", context);
    Script.assign("res1", "call fun1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res1", null, "'foo'", context).pass);
    Script.assign("fun2", "function(arg){ return arg + 'bar' }", context);
    Script.assign("res2", "call fun2 res1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res2", null, "'foobar'", context).pass);
}
@Test
public void testJsonReturnedFromJsRead() {
    // read() inside a js function should return the parsed classpath json
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ return read('classpath:test.json') }", context);
    Script.assign("val", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "val", null, "{ foo: 'bar' }", context).pass);
}
@Test
public void testJsonFromJsRead() {
    // json read() inside a js function should be navigable as an object
    ScenarioContext context = getContext();
    Script.assign("fun", "function(){ var temp = read('classpath:test.json'); return temp.foo == 'bar'; }", context);
    Script.assign("val", "call fun", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "val", null, "true", context).pass);
}
@Test
public void testParsingVariableAndJsonPath() {
    // json-path style expressions
    assertParsesTo("foo", "$", "foo");
    assertParsesTo("foo", "$.bar", "foo.bar");
    assertParsesTo("foo", "$['bar']", "foo['bar']");
    assertParsesTo("foo", "$[0]", "foo[0]");
    assertParsesTo("foo", "$[0].bar", "foo[0].bar");
    assertParsesTo("foo", "$[0]['bar']", "foo[0]['bar']");
    // xpath style expressions
    assertParsesTo("foo", "/bar", "foo/bar");
    assertParsesTo("foo", "/", "foo/");
    assertParsesTo("foo", "/bar/baz[1]/ban", "foo/bar/baz[1]/ban");
}

// asserts that the expression splits into the given variable name and path
private static void assertParsesTo(String name, String path, String expression) {
    assertEquals(StringUtils.pair(name, path), Script.parseVariableAndPath(expression));
}
@Test
public void testSetValueOnVariableByPath() {
    // 'set' semantics: update / null-out / create json and xml nodes,
    // auto-build missing parents, and append to arrays with '[]'
    ScenarioContext context = getContext();
    // json
    Script.assign("json", "{ foo: 'bar' }", context);
    Script.setValueByPath("json", "$.foo", "'hello'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'hello' }", context).pass);
    Script.setValueByPath("json.foo", null, "null", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: null }", context).pass);
    Script.setValueByPath("json.foo", null, "'world'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world' }", context).pass);
    Script.setValueByPath("json.bar[0]", null, "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [1] }", context).pass);
    Script.setValueByPath("json.bar[0]", null, "2", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [2] }", context).pass);
    Script.setValueByPath("json.bar[1]", null, "3", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'world', bar: [2, 3] }", context).pass);
    // json key that needs to be within quotes
    Script.assign("json", "{ 'bad-name': 'foo' }", context);
    Script.setValueByPath("json", "$['bad-name']", "'bar'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ 'bad-name': 'bar' }", context).pass);
    // json where parent nodes are built automatically
    Script.assign("json", "{}", context);
    Script.setValueByPath("json", "$.foo.bar", "'hello'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: { bar: 'hello' } }", context).pass);
    Script.assign("json", "[]", context);
    Script.setValueByPath("json", "$[0].a[0].c", "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[{a:[{c:1}]}]", context).pass);
    // json append to arrays
    Script.assign("json", "[]", context);
    Script.setValueByPath("json", "$[]", "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[1]", context).pass);
    Script.assign("json", "{ a: [] }", context);
    Script.setValueByPath("json", "$.a[]", "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1] }", context).pass);
    Script.assign("json", "{}", context);
    Script.setValueByPath("json", "$.a[]", "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1] }", context).pass);
    Script.assign("json", "{ a: [1] }", context);
    Script.setValueByPath("json", "$.a[]", "2", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: [1, 2] }", context).pass);
    // xml
    Script.assign("xml", "<root><foo>bar</foo></root>", context);
    Script.setValueByPath("xml", "/root/foo", "'hello'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>hello</foo></root>", context).pass);
    Script.setValueByPath("xml/root/foo", null, "null", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo/></root>", context).pass);
    Script.setValueByPath("xml/root/foo", null, "'world'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>world</foo></root>", context).pass);
    // xml where parent nodes are built automatically
    Script.assign("xml", "<root/>", context);
    Script.setValueByPath("xml", "/root/foo", "'hello'", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>hello</foo></root>", context).pass);
    Script.assign("xml", "<root/>", context);
    Script.setValueByPath("xml/root/foo/@bar", null, "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo bar=\"1\"/></root>", context).pass);
    Script.setValueByPath("xml/root/foo[2]", null, "1", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo bar=\"1\"/><foo>1</foo></root>", context).pass);
}
@Test
public void testSetXmlChunkAutoConversion() {
    // setting an xml chunk variable into an xpath should nest it as a node
    ScenarioContext context = getContext();
    Script.assign("xml", "<foo><bar></bar></foo>", context);
    Script.assign("chunk", "<hello>world</hello>", context);
    Script.setValueByPath("xml", "/foo/bar", "chunk", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<foo><bar><hello>world</hello></bar></foo>", context).pass);
}
@Test
public void testDeleteValueOnVariableByPath() {
    // 'remove' semantics: delete json keys and array elements,
    // and xml elements and attributes
    ScenarioContext context = getContext();
    // json
    Script.assign("json", "{ foo: 'bar', baz: 'ban' }", context);
    Script.removeValueByPath("json", "$.baz", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar' }", context).pass);
    Script.setValueByPath("json.baz", null, "[1, 2, 3]", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar', baz: [1, 2, 3] }", context).pass);
    Script.removeValueByPath("json", "$.baz[1]", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: 'bar', baz: [1, 3] }", context).pass);
    // xml
    Script.assign("xml", "<root><foo>bar</foo></root>", context);
    Script.removeValueByPath("xml", "/root/foo", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root/>", context).pass);
    // xml attribute
    Script.assign("xml", "<root hello=\"world\"><foo>bar</foo></root>", context);
    Script.removeValueByPath("xml", "/root/@hello", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "xml", null, "<root><foo>bar</foo></root>", context).pass);
}
@Test
public void testCallJsFunctionWithMap() {
    // a js identity function should pass a map argument through unchanged
    ScenarioContext context = getContext();
    Script.assign("json", "{ foo: 'bar', hello: 'world' }", context);
    Script.assign("fun", "function(o){ return o }", context);
    Script.assign("res", "call fun json", context);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "json", context).pass);
}
@Test
public void testDefaultValidators() {
    // built-in fuzzy markers: #ignore, #null / #notnull, #regex, #uuid,
    // #number, and #? self-expressions (which can also reference vars)
    ScenarioContext context = getContext();
    DocumentContext parsed = JsonUtils.toJsonDoc("{ foo: 'bar' }");
    context.vars.put("json", parsed);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#ignore' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#notnull' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#null' }", context).pass);
    // #regex works with or without a space after the marker
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex^bar' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex ^bar' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex^baX' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#regex ^baX' }", context).pass);
    parsed = JsonUtils.toJsonDoc("{ foo: null }");
    context.vars.put("json", parsed);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#ignore' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#null' }", context).pass);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#notnull' }", context).pass);
    parsed = JsonUtils.toJsonDoc("{ foo: 'a9f7a56b-8d5c-455c-9d13-808461d17b91' }");
    context.vars.put("json", parsed);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#uuid' }", context).pass);
    parsed = JsonUtils.toJsonDoc("{ foo: 'a9f7a56b-8d5c-455c-9d13' }");
    context.vars.put("json", parsed);
    assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#uuid' }", context).pass);
    parsed = JsonUtils.toJsonDoc("{ foo: 5 }");
    context.vars.put("json", parsed);
    context.vars.put("min", 4);
    context.vars.put("max", 6);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#number' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ == 5' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ < 6' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > 4' }", context).pass);
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > 4 && _ < 6' }", context).pass);
    // self-expressions can reference other variables
    assertTrue(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ foo: '#? _ > min && _ < max' }", context).pass);
}
@Test
public void testStringThatStartsWithHashSymbol() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ bar: '#####' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#####' }", ctx).pass);
}
@Test
public void testSimpleJsonMatch() {
ScenarioContext ctx = getContext();
DocumentContext doc = JsonUtils.toJsonDoc("{ foo: 'bar' }");
ctx.vars.put("json", doc);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json", "$", "{ }", ctx).pass);
}
    @Test
    public void testAssignJsonChunkObjectAndUse() {
        // assigning a sub-tree of an existing json variable to a new variable
        // should allow json-path access directly on the child
        ScenarioContext ctx = getContext();
        //===
        Script.assign("parent", "{ foo: 'bar', 'ban': { a: 1 } }", ctx);
        Script.assign("child", "parent.ban", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "child.a", null, "1", ctx).pass);
        //=== same thing but the child sub-tree holds an array
        Script.assign("parent", "{ foo: 'bar', 'ban': { a: [1, 2, 3] } }", ctx);
        Script.assign("child", "parent.ban", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "child.a[1]", null, "2", ctx).pass);
    }
    @Test
    public void testAssignJsonChunkListAndUse() {
        // a chunk assigned from a parent can be matched whole, as a nested
        // array, or by individual array index
        ScenarioContext ctx = getContext();
        //===
        Script.assign("parent", "{ foo: { bar: [{ baz: 1}, {baz: 2}, {baz: 3}] }}", ctx);
        Script.assign("child", "parent.foo", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "child", null, "{ bar: [{ baz: 1}, {baz: 2}, {baz: 3}]}", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "child.bar", null, "[{ baz: 1}, {baz: 2}, {baz: 3}]", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "child.bar[0]", null, "{ baz: 1}", ctx).pass);
    }
@Test
public void testEvalUrl() {
ScenarioContext ctx = getContext();
String url = "'http://localhost:8089/v1/cats'";
assertEquals("http://localhost:8089/v1/cats", Script.evalKarateExpression(url, ctx).getAsString());
}
@Test
public void testEvalParamWithDot() {
ScenarioContext ctx = getContext();
String param = "'ACS.Itself'";
assertEquals("ACS.Itself", Script.evalKarateExpression(param, ctx).getAsString());
}
@Test
public void testMatchHandlesNonStringNullsGracefully() {
ScenarioContext ctx = getContext();
Script.assign("json", "{ foo: null }", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "json.foo", null, "[]", ctx).pass);
}
    @Test
    public void testMatchJsonObjectContains() {
        // CONTAINS accepts any subset of keys; EQUALS needs all of them
        // (rhs key order does not matter)
        ScenarioContext ctx = getContext();
        Script.assign("json", "{ foo: 'bar', baz: [1, 2, 3] }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ baz: [1, 2, 3], foo: 'bar' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ baz: [1, 2, 3] }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ foo: 'bar' }", ctx).pass);
    }
    @Test
    public void testMatchJsonObjectPartialNotContains() {
        // NOT_CONTAINS passes when at least one rhs key fails to match,
        // even if the other keys do match
        ScenarioContext ctx = getContext();
        Script.assign("json", "{ a: 1, b: 2}", ctx);
        assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: 1, b: 3 }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: 1, b: '#string' }", ctx).pass);
    }
    @Test
    public void testMatchJsonArrayContains() {
        // array CONTAINS matches a subset of elements
        ScenarioContext ctx = getContext();
        Script.assign("foo", "{ bar: [1, 2, 3] }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.bar", null, "[1 ,2, 3]", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo.bar", null, "[1]", ctx).pass);
    }
    @Test
    public void testMatchContainsForSingleElements() {
        // a bare (non-array) rhs is treated as a single expected element
        ScenarioContext ctx = getContext();
        Script.assign("foo", "{ bar: [1, 2, 3] }", ctx);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo.bar", null, "1", ctx).pass);
        Script.assign("json", "[{ foo: 1 }, { foo: 2 }, { foo: 3 }]", ctx);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ foo: 1 }", ctx).pass);
        // CONTAINS_ONLY with a single element works on a one-element array
        Script.assign("json", "[{ foo: 1 }]", ctx);
        assertTrue(Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "{ foo: 1 }", ctx).pass);
    }
    @Test
    public void testMatchJsonObjectErrorReporting() {
        // the failure message should call out the extra key in the actual
        ScenarioContext ctx = getContext();
        Script.assign("json", "{ a: 1, b: 2, c: 3}", ctx);
        AssertionResult ar = Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: 1, c: 3 }", ctx);
        assertFalse(ar.pass);
        assertTrue(ar.message.contains("actual value has 1 more key"));
    }
    @Test
    public void testMatchJsonArrayErrorReporting() {
        // array failure messages should pinpoint the first mismatched value
        // and use the '$[*]' wildcard path for contains-style comparisons
        ScenarioContext ctx = getContext();
        Script.assign("json", "[{ foo: 1 }, { foo: 2 }, { foo: 3 }]", ctx);
        AssertionResult ar = Script.matchNamed(MatchType.EQUALS, "json", null, "[{ foo: 1 }, { foo: 2 }, { foo: 4 }]", ctx);
        assertFalse(ar.pass);
        assertTrue(ar.message.contains("actual: 3, expected: 4"));
        ar = Script.matchNamed(MatchType.CONTAINS, "json", null, "[{ foo: 1 }, { foo: 2 }, { foo: 4 }]", ctx);
        assertFalse(ar.pass);
        assertTrue(ar.message.contains("$[*]"));
        ar = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }, { foo: 0 }]", ctx);
        assertFalse(ar.pass);
        assertTrue(ar.message.contains("$[*]"));
        // CONTAINS_ONLY ignores element order but demands the exact same set
        ar = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }, { foo: 1 }]", ctx);
        assertTrue(ar.pass);
        // ... and the same size
        ar = Script.matchNamed(MatchType.CONTAINS_ONLY, "json", null, "[{ foo: 3 }, { foo: 2 }]", ctx);
        assertFalse(ar.pass);
        assertTrue(ar.message.contains("not the same size"));
    }
    @Test
    public void testMatchStringEqualsAndContains() {
        // string matching: EQUALS / NOT_EQUALS compare the whole string,
        // CONTAINS / NOT_CONTAINS are substring checks
        ScenarioContext ctx = getContext();
        Script.assign("foo", "'hello world'", ctx);
        // assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'hello world'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "'blah'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'blah'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "'hello world'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "foo", null, "'hello'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "'zoo'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "foo", null, "'blah'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.NOT_CONTAINS, "foo", null, "'world'", ctx).pass);
    }
    @Test
    public void testKarateEnvAccessFromScript() {
        // 'karate.env' inside embedded JS should reflect the env the feature
        // context was built with, and be null when no env was set
        FeatureContext featureContext = FeatureContext.forEnv("baz");
        CallContext callContext = new CallContext(null, true);
        ScenarioContext ctx = new ScenarioContext(featureContext, callContext, null, null);
        Script.assign("foo", "function(){ return karate.env }", ctx);
        Script.assign("bar", "call foo", ctx);
        ScriptValue bar = ctx.vars.get("bar");
        assertEquals("baz", bar.getValue());
        // null
        featureContext = FeatureContext.forEnv();
        ctx = new ScenarioContext(featureContext, callContext, null, null);
        Script.assign("foo", "function(){ return karate.env }", ctx);
        Script.assign("bar", "call foo", ctx);
        bar = ctx.vars.get("bar");
        assertNull(bar.getValue());
    }
    @Test
    public void testCallingFeatureWithNoArgument() {
        // variables defined inside the called feature become keys on the
        // json result of the 'call'
        ScenarioContext ctx = getContext();
        Script.assign("foo", "call read('test-called.feature')", ctx);
        ScriptValue a = Script.evalJsonPathOnVarByName("foo", "$.a", ctx);
        assertEquals(1, a.getValue());
        ScriptValue b = Script.evalJsonPathOnVarByName("foo", "$.b", ctx);
        assertEquals(2, b.getValue());
    }
    @Test
    public void testCallingFeatureWithVarOverrides() {
        // an inline json argument to 'call' is merged into the called
        // feature's variables and shows up on the result
        ScenarioContext ctx = getContext();
        Script.assign("foo", "call read('test-called.feature') { c: 3 }", ctx);
        ScriptValue a = Script.evalJsonPathOnVarByName("foo", "$.a", ctx);
        assertEquals(1, a.getValue());
        ScriptValue b = Script.evalJsonPathOnVarByName("foo", "$.b", ctx);
        assertEquals(2, b.getValue());
        ScriptValue c = Script.evalJsonPathOnVarByName("foo", "$.c", ctx);
        assertEquals(3, c.getValue());
    }
    @Test
    public void testCallingFeatureWithVarOverrideFromVariable() {
        // the 'call' argument can also be an existing variable reference
        // instead of an inline json literal
        ScenarioContext ctx = getContext();
        Script.assign("bar", "{ c: 3 }", ctx);
        Script.assign("foo", "call read('test-called.feature') bar", ctx);
        ScriptValue a = Script.evalJsonPathOnVarByName("foo", "$.a", ctx);
        assertEquals(1, a.getValue());
        ScriptValue b = Script.evalJsonPathOnVarByName("foo", "$.b", ctx);
        assertEquals(2, b.getValue());
        ScriptValue c = Script.evalJsonPathOnVarByName("foo", "$.c", ctx);
        assertEquals(3, c.getValue());
    }
    @Test
    public void testCallingFeatureWithList() {
        // calling a feature with a json array runs it once per element and
        // returns an array of results in the same order
        ScenarioContext ctx = getContext();
        Script.assign("foo", "call read('test-called.feature') [{c: 100}, {c: 200}, {c: 300}]", ctx);
        ScriptValue c0 = Script.evalJsonPathOnVarByName("foo", "$[0].c", ctx);
        assertEquals(100, c0.getValue());
        ScriptValue c1 = Script.evalJsonPathOnVarByName("foo", "$[1].c", ctx);
        assertEquals(200, c1.getValue());
        ScriptValue c2 = Script.evalJsonPathOnVarByName("foo", "$[2].c", ctx);
        assertEquals(300, c2.getValue());
    }
    @Test
    public void testCallingFeatureThatEvaluatesEmbeddedExpressions() {
        // embedded expressions in the called feature should see the argument
        // variables, for both json and xml payloads
        ScenarioContext ctx = getContext();
        Script.assign("result", "call read('test-called-embedded.feature') { foo: 'world' }", ctx);
        ScriptValue sv1 = Script.evalJsonPathOnVarByName("result", "$.json.hello", ctx);
        assertEquals("world", sv1.getValue());
        ScriptValue sv2 = Script.evalJsonPathOnVarByName("result", "$.xml.hello", ctx);
        assertEquals("world", sv2.getValue());
    }
    @Test
    public void testCallingFeatureThatEvaluatesEmbeddedExpressionsFromFileRead() {
        // same as above, but the payloads in the called feature come from
        // read() of external files rather than inline literals
        ScenarioContext ctx = getContext();
        Script.assign("result", "call read('test-called-embedded-file.feature') { foo: 'world' }", ctx);
        ScriptValue sv1 = Script.evalJsonPathOnVarByName("result", "$.json.hello", ctx);
        assertEquals("world", sv1.getValue());
        ScriptValue sv2 = Script.evalJsonPathOnVarByName("result", "$.xml.hello", ctx);
        assertEquals("world", sv2.getValue());
    }
    @Test
    public void testCallingFeatureWithJsonCreatedByJavaScript() {
        // a json object produced by a JS function can be passed as the
        // 'call' argument like any other json variable
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return { c: 100} }", ctx);
        Script.assign("res", "call fun", ctx);
        Script.assign("foo", "call read('test-called.feature') res", ctx);
        ScriptValue c = Script.evalJsonPathOnVarByName("foo", "$.c", ctx);
        assertEquals(100, c.getValue());
    }
    @Test
    public void testCallingFeatureWithJsonArrayCreatedByJavaScript() {
        // a JS-created array argument triggers the loop-call behavior and
        // yields an array result
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return [{ c: 100}] }", ctx);
        Script.assign("res", "call fun", ctx);
        Script.assign("foo", "call read('test-called.feature') res", ctx);
        ScriptValue c = Script.evalJsonPathOnVarByName("foo", "$[0].c", ctx);
        assertEquals(100, c.getValue());
    }
    @Test
    public void testSetOnJsonArrayCreatedByJavaScript() {
        // 'set' by path should work on a json array that originated from JS
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return [{a: 1}, {a: 2}, {b: 3}] }", ctx);
        Script.assign("json", "call fun", ctx);
        Script.setValueByPath("json[1].a", null, "5", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "[{a: 1}, {a: 5}, {b: 3}]", ctx).pass);
    }
    @Test
    public void testGetSyntaxForJson() {
        // the 'get' keyword (and the inline '$var.path' shorthand) extracts
        // values via json-path; 'get[i]' picks one element of the result
        ScenarioContext ctx = getContext();
        Script.assign("foo", "[{baz: 1}, {baz: 2}, {baz: 3}]", ctx);
        Script.assign("nums", "get foo[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
        Script.assign("first", "get[0] foo[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "first", null, "1", ctx).pass);
        Script.assign("second", "get[1] foo[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "second", null, "2", ctx).pass);
        // alternative to get, usable in-line within match statements
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[*].baz", null, "$foo[*].baz", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "$foo[*].baz", null, "$foo[*].baz", ctx).pass);
        Script.assign("foo", "{ bar: [{baz: 1}, {baz: 2}, {baz: 3}]}", ctx);
        Script.assign("nums", "get foo.bar[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.bar[*].baz", null, "$foo.bar[*].baz", ctx).pass);
        // 'get var $.path' form: variable name and path given separately
        Script.assign("nums", "get foo $.bar[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "nums", null, "[1, 2, 3]", ctx).pass);
        // a bare '$' path defaults to the 'response' variable
        Script.assign("response", "[{baz: 1}, {baz: 2}, {baz: 3}]", ctx);
        Script.assign("second", "get[1] $[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "second", null, "2", ctx).pass);
        Script.assign("third", "get[2] response $[*].baz", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "third", null, "3", ctx).pass);
    }
    @Test
    public void testGetSyntaxForXml() {
        // 'get' also supports xpath expressions (here the count() function)
        // against an xml variable
        ScenarioContext ctx = getContext();
        Script.assign("foo", "<records>\n  <record>a</record>\n  <record>b</record>\n  <record>c</record>\n</records>", ctx);
        Script.assign("count", "get foo count(//record)", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "count", null, "3", ctx).pass);
    }
    @Test
    public void testFromJsKarateCallFeatureWithNoArg() {
        // karate.call() from inside JS behaves like the 'call' keyword
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return karate.call('test-called.feature') }", ctx);
        Script.assign("res", "fun()", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res.a", null, "1", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res.b", null, "2", ctx).pass);
    }
    @Test
    public void testFromJsKarateCallFeatureWithJsonArg() {
        // karate.call() with a second json argument passes it as overrides
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return karate.call('test-called.feature', {c: 3}) }", ctx);
        Script.assign("res", "fun()", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res.a", null, "1", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res.b", null, "2", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res.c", null, "3", ctx).pass);
    }
    @Test
    public void testFromJsKarateGetForNonExistentVariable() {
        // karate.get() on an undefined variable should yield a falsy value
        // rather than throwing
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ var foo = karate.get('foo'); return foo ? true : false }", ctx);
        Script.assign("res", "fun()", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "false", ctx).pass);
    }
    @Test
    public void testFromJsKarateGetForJsonArrayVariable() {
        // a JS function returning an array converts cleanly to a json array
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ return [1, 2, 3] }", ctx);
        Script.assign("res", "call fun", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2, 3]", ctx).pass);
    }
    @Test
    public void testFromJsKarateGetForJsonObjectVariableAndCallFeatureAndJs() {
        // NOTE(review): headers.js appears to read the 'ticket' variable
        // lazily - the second 'call fun' (after signin.feature has set
        // 'ticket') returns an extra key; confirm against headers.js
        ScenarioContext ctx = getContext();
        Script.assign("fun", "read('headers.js')", ctx);
        Script.assign("res", "call fun", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ foo: 'bar_someValue' }", ctx).pass);
        Script.assign("signin", "call read('signin.feature')", ctx);
        Script.assign("ticket", "signin.ticket", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "ticket", null, "{ foo: 'bar' }", ctx).pass);
        Script.assign("res", "call fun", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "{ foo: 'bar_someValue', baz: 'ban' }", ctx).pass);
    }
    @Test
    public void testFromJsKarateJsonPath() {
        // karate.jsonPath() works on both JS-local objects and named vars,
        // and can appear directly on the rhs of a match
        ScenarioContext ctx = getContext();
        Script.assign("fun", "function(){ var foo = [{v:1},{v:2}]; return karate.jsonPath(foo, '$[*].v') }", ctx);
        Script.assign("res", "call fun", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "[1, 2]", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "karate.jsonPath([{v:1},{v:2}], '$[*].v')", ctx).pass);
        Script.assign("foo", "[{v:1},{v:2}]", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "karate.jsonPath(foo, '$[*].v')", ctx).pass);
    }
    @Test
    public void testAssigningRawTextWhichOtherwiseConfusesKarate() {
        // malformed json must fail a normal assign, but the TEXT assign
        // type accepts the raw string verbatim
        ScenarioContext ctx = getContext();
        try {
            Script.assign("foo", "{ not json }", ctx);
            fail("we expected this to fail");
        } catch (InvalidJsonException e) {
            logger.debug("expected {}", e.getMessage());
        }
        Script.assign(AssignType.TEXT, "foo", "{ not json }", ctx, true);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'{ not json }'", ctx).pass);
    }
    @Test
    public void testBigDecimalsInJson() {
        // high-precision decimals must compare by numeric value without
        // losing precision: trailing zeros are equal, a differing last
        // digit is not
        ScenarioContext ctx = getContext();
        Script.assign("foo", "{ val: -1002.2000000000002 }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000002 }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000002 }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
        Script.assign("foo", "{ val: -1002.20 }", ctx);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000001 }", ctx).pass);
        // -1002.20 == -1002.2000000000000 (only trailing zeros differ)
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.2000000000000 }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.2000000000000 }", ctx).pass);
        Script.assign("foo", "{ val: -1002.2000000000001 }", ctx);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
        Script.assign("foo", "{ val: -1002.2000000000000 }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.NOT_EQUALS, "foo", null, "{ val: -1002.20 }", ctx).pass);
    }
@Test
public void testDollarInEmbeddedExpressions() {
ScenarioContext ctx = getContext();
Script.assign("temperature", "{ celsius: 100, fahrenheit: 212 }", ctx);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "temperature", null, "{ fahrenheit: 212 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.CONTAINS, "temperature", null, "{ fahrenheit: '#($.celsius * 1.8 + 32)' }", ctx).pass);
}
@Test
public void testOptionalAndUnMatchedActualKeys() {
ScenarioContext ctx = getContext();
Script.assign("expected", "{ a: 1, b: 2, c: '##null' }", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "expected", null, "{ a: 1, b: 2, d: 3}", ctx).pass);
}
    @Test
    public void testValidationStringInsteadOfNumberInPredicate() {
        // '#number' is type-strict, but a bare '#? ...' predicate uses JS
        // loose comparison (so '5' == 5 passes while '5' === 5 does not)
        ScenarioContext ctx = getContext();
        Script.assign("foo", "{ bar: 5 }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ == 5' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ > 0' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number? _ > 0' }", ctx).pass);
        Script.assign("foo", "{ bar: '5' }", ctx);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ == 5' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ === 5' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#? _ > 0' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ bar: '#number? _ > 0' }", ctx).pass);
    }
    @Test
    public void testMatchMacroArray() {
        // '#[n]' validates array length; an optional trailing expression
        // ('#[n] schema' or '#[n]? predicate') validates each element
        ScenarioContext ctx = getContext();
        Script.assign("foo", "['bar', 'baz']", ctx);
        Script.assign("arr", "'#string'", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#array'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#number'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[]'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1]'", ctx).pass);
        // '_' inside the brackets is the array length
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[_ == 2]'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[_ != 2]'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] arr'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (arr)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] #string'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] #number'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2] #string'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1] arr'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[1] (arr)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]? _.length == 3'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[2]? _.length == 4'", ctx).pass);
        // non-root path
        Script.assign("foo", "{ ban: ['bar', 'baz'], count: 2 }", ctx);
        Script.assign("len", "2", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[] arr'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[] (arr)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[2] arr'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[2] (arr)'", ctx).pass);
        // assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[$.count] #string'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[foo.count] #string'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[len] #string'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo.ban", null, "'#[_ < 3]'", ctx).pass);
    }
    @Test
    public void testMatchMacroArrayComplex() {
        // schema-prefix markers: '^' = contains, '^^' = contains only,
        // '^*' = contains any, '!^' = not contains; applied per element
        // via '#[] marker schema' or to a single object via '#(marker var)'
        ScenarioContext ctx = getContext();
        Script.assign("foo", "[{ a: 1, b: 2 }, { a: 3, b: 4 }]", ctx);
        Script.assign("bar", "{ a: '#number', b: '#number' }", ctx);
        Script.assign("baz", "{ c: '#number' }", ctx);
        Script.assign("ban", "{ b: '#number' }", ctx);
        assertTrue(Script.matchNamed(MatchType.EACH_EQUALS, "foo", null, "bar", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^ban)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*ban)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^^bar)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^^ban)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^bar)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^ban)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^baz)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] bar'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^bar'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^*bar'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^^bar'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^^ban'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^*ban'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^ban'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^bar'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^ban'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^baz'", ctx).pass);
        // same markers with the schema wrapped in parentheses
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^*bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^ban)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^*ban)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^^bar)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^^ban)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^baz)'", ctx).pass);
    }
    @Test
    public void testMatchMacroArrayComplexContains() {
        // contains-style markers applied to whole arrays: '^' (contains,
        // order-insensitive / subset), '^^' (contains only), '!^' (not
        // contains); a single object rhs is treated as a one-element set
        ScenarioContext ctx = getContext();
        Script.assign("foo", "[{ a: 1, b: 2 }, { a: 3, b: 4 }]", ctx);
        Script.assign("rev", "[{ a: 3, b: 4 }, { a: 1, b: 2 }]", ctx);
        Script.assign("part", "[{ a: 1, b: 2 }]", ctx);
        Script.assign("one", "{ a: 1, b: 2 }", ctx);
        Script.assign("nopes", "[{ a: 6, b: 7 }, { a: 8, b: 9 }]", ctx);
        Script.assign("nope", "{ a: 8, b: 9 }", ctx);
        Script.assign("bar", "{ b: '#number' }", ctx);
        Script.assign("baz", "{ c: '#number' }", ctx);
        assertFalse(Script.matchNamed(MatchType.EACH_EQUALS, "foo", null, "bar", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(^*bar)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo[0]", null, "'#(!^baz)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] bar'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] ^bar'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^bar'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] !^baz'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (^bar)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^bar)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#[] (!^baz)'", ctx).pass);
        // whole-array comparisons against other array / object variables
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(foo)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^foo)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^*foo)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^foo)'", ctx).pass);
        // reversed order: contains passes, strict equals fails
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^rev)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^rev)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(rev)'", ctx).pass);
        // subset array / single element
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^part)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^part)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^part)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^one)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^one)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^one)'", ctx).pass);
        // completely disjoint elements: only '!^' passes
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(nopes)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^nopes)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^nopes)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^nopes)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(nope)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^nope)'", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(^^nope)'", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "'#(!^nope)'", ctx).pass);
    }
    @Test
    public void testSchemaLikeAndOptionalKeys() {
        // '##' prefixed markers make a key or value optional: they match
        // when absent or null, but still validate when present
        ScenarioContext ctx = getContext();
        Script.assign("child", "{ hello: '#string' }", ctx);
        Script.assign("json", "{ foo: 'bar', baz: [1, 2, 3]}", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '##(child)' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '#(child)' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number' }", ctx).pass);
        Script.assign("json", "{ foo: 'bar', child: { hello: 'world' } }", ctx);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '#[] #number', child: '#(child)' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number', child: '#(child)' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##[] #number', child: '##(child)' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', blah: '##number', child: '#(child)' }", ctx).pass);
        // an optional marker also accepts an explicit null value
        Script.assign("json", "{ foo: 'bar', baz: null }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ foo: '#string', baz: '##string' }", ctx).pass);
        Script.assign("json", "null", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "'##string'", ctx).pass);
    }
    @Test
    public void testPresentNotPresentAndOptionalNulls() {
        // exhaustive truth table for '#present', '#notpresent', '#null' and
        // '##null' across three actuals: key absent, key non-null, key null
        ScenarioContext ctx = getContext();
        Script.assign("json", "{ }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: null }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#present' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#present' }", ctx).pass);
        Script.assign("json", "{ a: 1 }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#present' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#present' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: null }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        Script.assign("json", "{ a: null }", ctx);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: null }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: null }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#null' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '##null' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#null' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '##null' }", ctx).pass);
        // a key present with a null value is still 'present'
        assertFalse(Script.matchNamed(MatchType.EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertFalse(Script.matchNamed(MatchType.CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_EQUALS, "json", null, "{ a: '#notpresent' }", ctx).pass);
        assertTrue(Script.matchNamed(MatchType.NOT_CONTAINS, "json", null, "{ a: '#notpresent' }", ctx).pass);
    }
@Test
// Regression guard: evaluating a JSON-path match ($.foo) against a non-JSON
// actual (an empty string) must fail the match cleanly rather than throw.
public void testJsonPathWhenActualIsEmptyString() {
ScenarioContext ctx = getContext();
Script.assign("response", "''", ctx);
assertFalse(Script.matchNamed(MatchType.EQUALS, "$.foo", null, "'#notnull'", ctx).pass);
}
@Test
// Placeholder substitution: bare text without delimiters is left untouched,
// <angle>-delimited and custom @@token@@ placeholders are replaced, and every
// occurrence in the text is substituted.
public void testReplace() {
ScenarioContext ctx = getContext();
assertEquals("foo", Script.replacePlaceholderText("foo", "foo", "'bar'", ctx));
assertEquals("bar", Script.replacePlaceholderText("<foo>", "foo", "'bar'", ctx));
assertEquals("bar", Script.replacePlaceholderText("<foo>", "foo", "'bar'", ctx));
assertEquals("bar", Script.replacePlaceholderText("@@foo@@", "@@foo@@", "'bar'", ctx));
assertEquals("bar bar bar", Script.replacePlaceholderText("<foo> <foo> <foo>", "foo", "'bar'", ctx));
}
@Test
// karate.eval() exposed to JS: evaluates expressions against previously
// assigned variables (both plain values and boolean expressions) and can also
// appear on the right-hand side of a match.
public void testEvalFromJs() {
ScenarioContext ctx = getContext();
Script.assign("temperature", "{ celsius: 100, fahrenheit: 212 }", ctx);
Script.assign("res", "karate.eval('temperature.celsius')", ctx);
Script.assign("bool", "karate.eval('temperature.celsius == 100')", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "res", null, "100", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bool", null, "true", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "temperature.fahrenheit", null, "karate.eval('temperature.celsius * 1.8 + 32')", ctx).pass);
}
@Test
// '##(null)' optional markers: multiple keys whose value resolves to null are
// removed from the payload, so an equals match against only the remaining
// key passes.
public void testRemoveIfNullMultiple() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ first: 'bar', second: '##(null)', third: '##(null)' }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ first: 'bar' }", ctx).pass);
}
@Test
// Matching is type-strict: the string '5' does not equal the number 5, and
// the boolean true does not equal the string 'true' (in either direction).
public void testMatchingIsStrictForDataTypes() {
ScenarioContext ctx = getContext();
Script.assign("foo", "{ a: '5', b: 5, c: true, d: 'true' }", ctx);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ a: 5 }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ b: '5' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ c: 'true' }", ctx).pass);
assertFalse(Script.matchNamed(MatchType.CONTAINS, "foo", null, "{ d: true }", ctx).pass);
}
@Test
// Explicit AssignType.JSON coerces the raw text into a JSON document for both
// the empty-list and empty-map literals, which then match their JSON forms.
public void testTypeConversion() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.JSON, "foo", "[]", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "[]", ctx).pass);
Script.assign(AssignType.JSON, "foo", "{}", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{}", ctx).pass);
}
@Test
// BYTE_ARRAY assignment plus equality matching of binary content read twice
// from the same file.
// NOTE(review): depends on src/main/resources/res/karate-logo.png existing at
// the working directory the tests run from.
public void testBinaryMatching() {
ScenarioContext ctx = getContext();
Script.assign(AssignType.BYTE_ARRAY, "data", "read('file:src/main/resources/res/karate-logo.png')", ctx, true);
assertTrue(Script.matchNamed(MatchType.EQUALS, "data", null, "read('file:src/main/resources/res/karate-logo.png')", ctx).pass);
}
@Test
// A map that contains itself must survive removeCyclicReferences(): the cycle
// is replaced by the class name of the offending container, after which the
// structure converts to a JSON document without infinite recursion.
public void testJsonCyclicReferences() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ var env = 'dev'; var config = { env: env }; return config }", ctx);
Script.assign("json", "fun()", ctx);
Map value = (Map) ctx.vars.get("json").getValue();
value.put("child", value);
value = JsonUtils.removeCyclicReferences(value);
DocumentContext doc = JsonUtils.toJsonDoc(value);
Map temp = doc.read("$");
Match.equals(temp, "{ env: 'dev', child: '#java.util.LinkedHashMap' }");
}
@Test
// A JS function invocation can appear on the left-hand side of a match,
// including property access on its result and a parenthesised form.
public void testMatchFunctionOnLhs() {
ScenarioContext ctx = getContext();
Script.assign("fun", "function(){ return true }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun()", null, "true", ctx).pass);
Script.assign("fun", "function(){ return { a: 1 } }", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun()", null, "{ a: 1 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "fun().a", null, "1", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "(fun().a)", null, "1", ctx).pass);
}
@Test
// karate.toJson(pojo): the one-arg form keeps null fields in the resulting
// JSON; the two-arg form with removeNulls=true strips them.
public void testKarateToJson() {
ScenarioContext ctx = getContext();
Script.assign("SP", "Java.type('com.intuit.karate.SimplePojo')", ctx);
Script.assign("sp", "new SP()", ctx);
Script.evalJsExpression("sp.bar = 10", ctx);
Script.assign("foo", "karate.toJson(sp)", ctx);
Script.assign("bar", "karate.toJson(sp, true)", ctx);
assertTrue(Script.matchNamed(MatchType.EQUALS, "foo", null, "{ foo: null, bar: 10 }", ctx).pass);
assertTrue(Script.matchNamed(MatchType.EQUALS, "bar", null, "{ bar: 10 }", ctx).pass);
}
@Test
// Runs the notEqualMatch feature end-to-end and checks that the two variables
// it produces really differ (covers the != match operator at runtime).
public void notEqualMatchTest() {
    final Map<String, Object> vars =
            Runner.runFeature(getClass(), "core/notEqualMatch.feature", null, true);
    final Object left = vars.get("a");
    final Object right = vars.get("b");
    assertNotEquals(left, right);
}
}
|
add a test case for the thing that the prev 2 commits missed
|
karate-core/src/test/java/com/intuit/karate/ScriptTest.java
|
add a test case for the thing that the prev 2 commits missed
|
|
Java
|
mit
|
ff0502792c6afd8b4ff2e328346c4bd0afa9fa49
| 0
|
bcgit/bc-java,bcgit/bc-java,bcgit/bc-java
|
package org.bouncycastle.tls;
import java.io.IOException;
import org.bouncycastle.tls.crypto.TlsCryptoParameters;
import org.bouncycastle.tls.crypto.TlsSecret;
/**
 * Base interface for a class that decrypts TLS secrets.
 */
public interface TlsCredentialedDecryptor
extends TlsCredentials
{
/**
 * Decrypt the passed in cipher text using the parameters available.
 *
 * @param cryptoParams the parameters to use for the decryption.
 * @param ciphertext the cipher text containing the secret.
 * @return a TLS secret.
 * @throws IOException on a parsing or decryption error.
 */
TlsSecret decrypt(TlsCryptoParameters cryptoParams, byte[] ciphertext) throws IOException;
}
|
tls/src/main/java/org/bouncycastle/tls/TlsCredentialedDecryptor.java
|
package org.bouncycastle.tls;
import java.io.IOException;
import org.bouncycastle.tls.crypto.TlsCryptoParameters;
import org.bouncycastle.tls.crypto.TlsSecret;
public interface TlsCredentialedDecryptor
extends TlsCredentials
{
TlsSecret decrypt(TlsCryptoParameters cryptoParams, byte[] ciphertext) throws IOException;
}
|
JavaDoc
|
tls/src/main/java/org/bouncycastle/tls/TlsCredentialedDecryptor.java
|
JavaDoc
|
|
Java
|
mit
|
6b85febed32dc429af235fe17ba8a74af56371b8
| 0
|
jrejaud/WearSocket
|
package com.github.jrejaud.wear_socket;
import android.app.Activity;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.wearable.CapabilityApi;
import com.google.android.gms.wearable.CapabilityInfo;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataItem;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.MessageApi;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.Node;
import com.google.android.gms.wearable.NodeApi;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import com.google.gson.Gson;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.Semaphore;
/**
* Created by Jordan on 6/23/2015.
*/
public class WearSocket implements MessageApi.MessageListener, DataApi.DataListener {
private static WearSocket ourInstance = new WearSocket();
private GoogleApiClient googleApiClient = null;
private final Semaphore nodeFound = new Semaphore(0,true);
private String TAG = "WearSocket";
private Context context;
private String nodeID = null;
private String receiverPath= null;
private String dataPath = null;
private String capability = null;
private MessageListener messageReceived;
private DataListener dataChanged;
private onErrorListener errorListener;
public static WearSocket getInstance() {
return ourInstance;
}
private WearSocket() {
}
//********************************************************************
//Setup and State Handling
//********************************************************************
public void setupAndConnect(final Context context, final String capability, onErrorListener errorListener) {
this.context = context;
this.errorListener = errorListener;
this.capability = capability;
Log.d(TAG, "Starting up Google Api Client");
googleApiClient = new GoogleApiClient.Builder(context)
.addConnectionCallbacks(new GoogleApiClient.ConnectionCallbacks() {
@Override
public void onConnected(Bundle bundle) {
Log.d(TAG, "Google Api Client Connected, bundle: " + bundle);
findCapableNode(capability);
}
@Override
public void onConnectionSuspended(int i) {
Log.d(TAG, "onConnectedSuspended: " + i);
}
})
.addOnConnectionFailedListener(new GoogleApiClient.OnConnectionFailedListener() {
@Override
public void onConnectionFailed(ConnectionResult result) {
throw new RuntimeException(result.toString());
}
})
.addApi(Wearable.API)
.build();
googleApiClient.connect();
}
private void findCapableNode(final String capability) {
Log.d(TAG,"Start looking for a capable node");
new Thread(new Runnable() {
@Override
public void run() {
CapabilityApi.GetCapabilityResult result =
Wearable.CapabilityApi.getCapability(googleApiClient,capability,CapabilityApi.FILTER_REACHABLE).await();
CapabilityInfo capabilityInfo = result.getCapability();
Set<Node> nodes = capabilityInfo.getNodes();
String nodeID = findBestNodeId(nodes);
Log.d(TAG,"Node found: "+nodeID);
if (nodeID==null) {
//This might be caused by there not being a watch paired to the device
//Handler is to run it in the UI thread
new Handler().post(new Runnable() {
@Override
public void run() {
errorListener.onError(new Throwable("Error, cannot find a connected device"));
}
});
return;
}
WearSocket.this.nodeID = nodeID;
nodeFound.release();
}
}).start();
}
private String findBestNodeId(Set<Node> nodes) {
String bestNodeId = null;
Log.d(TAG,"Found Set of nodes: "+nodes.size());
for (Node node : nodes) {
if (node.isNearby()) {
Log.d(TAG,"Found nearby node: "+node.getId());
return node.getId();
}
bestNodeId = node.getId();
}
Log.d(TAG,"No nearby node found, settling for: "+bestNodeId);
return bestNodeId;
}
@Deprecated
private void findFirstNode() {
new Thread(new Runnable() {
@Override
public void run() {
Log.d(TAG, "Start Node Search");
NodeApi.GetConnectedNodesResult nodes =
Wearable.NodeApi
.getConnectedNodes(googleApiClient).await();
if (nodes.getNodes() == null) {
Log.d(TAG, "No nodes found");
showErrorAndCloseApp("Error, cannot find any nodes, make sure watch is paired to phone",true);
}
//TODO need to make this compatible with getting android wear connection over wifi instead of just bluetooth
nodeID = nodes.getNodes().get(0).getId();
Log.d(TAG,"Node found: "+ nodeID);
nodeFound.release();
}
}).start();
}
//*************************************************************************************
private void showErrorAndCloseApp(String message, boolean closeApp) {
Toast.makeText(context,message,Toast.LENGTH_SHORT).show();
Log.e(TAG, message);
disconnect();
if (closeApp) {
((Activity)context).finish();
}
}
public void disconnect() {
if (googleApiClient!=null) {
googleApiClient.disconnect();
}
Wearable.MessageApi.removeListener(googleApiClient, this);
Wearable.DataApi.removeListener(googleApiClient, this);
}
//********************************************************************
//Send Message
//********************************************************************
public void sendMessage(String path, String message) {
new sendMessageTask(path,message).execute();
}
private class sendMessageTask extends AsyncTask<Void, Void, Boolean> {
private String path;
private String message;
public sendMessageTask(String path, String message) {
this.path = path;
this.message = message;
}
@Override
protected Boolean doInBackground(Void... voids) {
while (nodeID ==null) {
Log.d(TAG,"Node not found yet, waiting until one is found to send message");
try {
nodeFound.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Log.d(TAG,"Sending message to nodeID: "+ nodeID);
MessageApi.SendMessageResult result = Wearable.MessageApi.sendMessage(
googleApiClient, nodeID, path, message.getBytes()).await();
if (!result.getStatus().isSuccess()) {
return false;
}
return true;
}
@Override
protected void onPostExecute(Boolean messageResponse) {
if (messageResponse) {
Log.d(TAG, "Message " + path + " : " + message + " sent successfully");
} else {
showErrorAndCloseApp("Could not send message "+path+" : "+message,false);
}
super.onPostExecute(messageResponse);
}
}
//********************************************************************
//Receive Messages
//********************************************************************
public void startMessageListener(Context context, String path) {
messageReceived = (MessageListener) context;
this.receiverPath = path;
Wearable.MessageApi.addListener(googleApiClient, this);
}
public interface MessageListener {
void messageReceived(String path, String message);
}
@Override
public void onMessageReceived(final MessageEvent messageEvent) {
if (messageEvent.getPath().equals(receiverPath)) {
((Activity)context).runOnUiThread(new Runnable() {
@Override
public void run() {
String data = null;
try {
data = new String(messageEvent.getData(),"UTF-8");
messageReceived.messageReceived(messageEvent.getPath(), data);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
});
}
}
//********************************************************************
//Update Data
//********************************************************************
public void updateDataItem(final String path, final String key, final Object object) {
new Thread(new Runnable() {
@Override
public void run() {
Log.d(TAG,"Updating data item: "+key+" "+object.toString());
Gson gson = new Gson();
String jsonData = gson.toJson(object);
if (!path.startsWith("/")) {
Log.e(TAG,"Path "+path+" must start with a /!");
return;
}
PutDataMapRequest putDataMapRequest = PutDataMapRequest.create(path);
putDataMapRequest.getDataMap().putString(key, jsonData);
PutDataRequest putDataRequest = putDataMapRequest.asPutDataRequest();
PendingResult<DataApi.DataItemResult> pendingResult = Wearable.DataApi.putDataItem(googleApiClient, putDataRequest);
DataApi.DataItemResult result = pendingResult.await();
Log.d(TAG,result.toString());
Log.d(TAG, "Update data item result: " + result.getStatus().getStatusMessage());
}
}).start();
}
//********************************************************************
//Receive Data Changes and set keytypes
//********************************************************************
public void startDataListener(Context context, String path) {
Log.d(TAG,"Starting data listener");
dataChanged = (DataListener) context;
dataPath = path;
Wearable.DataApi.addListener(googleApiClient,this);
keyTypes = new HashMap<>();
}
public interface DataListener {
void dataChanged(String key, Object data);
}
@Override
public void onDataChanged(final DataEventBuffer dataEventBuffer) {
Log.d(TAG,"Data change event received");
for (DataEvent dataEvent : dataEventBuffer) {
DataItem dataItem = dataEvent.getDataItem();
if (dataItem.getUri().getPath().compareTo(dataPath) == 0) {
DataMap dataMap = DataMapItem.fromDataItem(dataItem).getDataMap();
Set<String> keys = dataMap.keySet();
for (String key : keys) {
String data = dataMap.getString(key);
Gson gson = new Gson();
if (!keyTypes.containsKey(key)) {
Log.e(TAG,key+" key not associated to a datatype, please setKeyDataType");
return;
}
Object object = gson.fromJson(data, keyTypes.get(key));
dataChanged.dataChanged(key, object);
}
}
}
}
private HashMap<String,Type> keyTypes;
public void setKeyDataType(String key, Type type) {
keyTypes.put(key, type);
}
//********************************************************************
//Error Listener
//********************************************************************
public interface onErrorListener {
void onError(Throwable throwable);
}
}
|
wear-socket/src/main/java/com/github/jrejaud/wear_socket/WearSocket.java
|
package com.github.jrejaud.wear_socket;
import android.app.Activity;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import com.google.android.gms.common.ConnectionResult;
import com.google.android.gms.common.api.GoogleApiClient;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.wearable.CapabilityApi;
import com.google.android.gms.wearable.CapabilityInfo;
import com.google.android.gms.wearable.DataApi;
import com.google.android.gms.wearable.DataEvent;
import com.google.android.gms.wearable.DataEventBuffer;
import com.google.android.gms.wearable.DataItem;
import com.google.android.gms.wearable.DataMap;
import com.google.android.gms.wearable.DataMapItem;
import com.google.android.gms.wearable.MessageApi;
import com.google.android.gms.wearable.MessageEvent;
import com.google.android.gms.wearable.Node;
import com.google.android.gms.wearable.NodeApi;
import com.google.android.gms.wearable.PutDataMapRequest;
import com.google.android.gms.wearable.PutDataRequest;
import com.google.android.gms.wearable.Wearable;
import com.google.gson.Gson;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Set;
import java.util.concurrent.Semaphore;
/**
* Created by Jordan on 6/23/2015.
*/
public class WearSocket implements MessageApi.MessageListener, DataApi.DataListener {
private static WearSocket ourInstance = new WearSocket();
private GoogleApiClient googleApiClient = null;
private final Semaphore nodeFound = new Semaphore(0,true);
private String TAG = "WearSocket";
private Context context;
private String nodeID = null;
private String receiverPath= null;
private String dataPath = null;
private String capability = null;
private MessageListener messageReceived;
private DataListener dataChanged;
private onErrorListener errorListener;
public static WearSocket getInstance() {
return ourInstance;
}
private WearSocket() {
}
//********************************************************************
//Setup and State Handling
//********************************************************************
public void setupAndConnect(final Context context, final String capability, onErrorListener errorListener) {
this.context = context;
this.errorListener = errorListener;
this.capability = capability;
Log.d(TAG, "Starting up Google Api Client");
googleApiClient = new GoogleApiClient.Builder(context)
.addConnectionCallbacks(new GoogleApiClient.ConnectionCallbacks() {
@Override
public void onConnected(Bundle bundle) {
Log.d(TAG, "Google Api Client Connected, bundle: " + bundle);
findCapableNode(capability);
}
@Override
public void onConnectionSuspended(int i) {
Log.d(TAG, "onConnectedSuspended: " + i);
}
})
.addOnConnectionFailedListener(new GoogleApiClient.OnConnectionFailedListener() {
@Override
public void onConnectionFailed(ConnectionResult result) {
throw new RuntimeException(result.toString());
}
})
.addApi(Wearable.API)
.build();
googleApiClient.connect();
}
private void findCapableNode(final String capability) {
Log.d(TAG,"Start looking for a capable node");
new Thread(new Runnable() {
@Override
public void run() {
CapabilityApi.GetCapabilityResult result =
Wearable.CapabilityApi.getCapability(googleApiClient,capability,CapabilityApi.FILTER_REACHABLE).await();
CapabilityInfo capabilityInfo = result.getCapability();
Set<Node> nodes = capabilityInfo.getNodes();
String nodeID = findBestNodeId(nodes);
Log.d(TAG,"Node found: "+nodeID);
if (nodeID==null) {
//This might be cause by there not being a watch paired to the device
errorListener.onError(new Throwable("Error, cannot find a connected device"));
return;
}
WearSocket.this.nodeID = nodeID;
nodeFound.release();
}
}).start();
}
private String findBestNodeId(Set<Node> nodes) {
String bestNodeId = null;
Log.d(TAG,"Found Set of nodes: "+nodes.size());
for (Node node : nodes) {
if (node.isNearby()) {
Log.d(TAG,"Found nearby node: "+node.getId());
return node.getId();
}
bestNodeId = node.getId();
}
Log.d(TAG,"No nearby node found, settling for: "+bestNodeId);
return bestNodeId;
}
@Deprecated
private void findFirstNode() {
new Thread(new Runnable() {
@Override
public void run() {
Log.d(TAG, "Start Node Search");
NodeApi.GetConnectedNodesResult nodes =
Wearable.NodeApi
.getConnectedNodes(googleApiClient).await();
if (nodes.getNodes() == null) {
Log.d(TAG, "No nodes found");
showErrorAndCloseApp("Error, cannot find any nodes, make sure watch is paired to phone",true);
}
//TODO need to make this compatible with getting android wear connection over wifi instead of just bluetooth
nodeID = nodes.getNodes().get(0).getId();
Log.d(TAG,"Node found: "+ nodeID);
nodeFound.release();
}
}).start();
}
//*************************************************************************************
private void showErrorAndCloseApp(String message, boolean closeApp) {
Toast.makeText(context,message,Toast.LENGTH_SHORT).show();
Log.e(TAG, message);
disconnect();
if (closeApp) {
((Activity)context).finish();
}
}
public void disconnect() {
if (googleApiClient!=null) {
googleApiClient.disconnect();
}
Wearable.MessageApi.removeListener(googleApiClient, this);
Wearable.DataApi.removeListener(googleApiClient, this);
}
//********************************************************************
//Send Message
//********************************************************************
public void sendMessage(String path, String message) {
new sendMessageTask(path,message).execute();
}
private class sendMessageTask extends AsyncTask<Void, Void, Boolean> {
private String path;
private String message;
public sendMessageTask(String path, String message) {
this.path = path;
this.message = message;
}
@Override
protected Boolean doInBackground(Void... voids) {
while (nodeID ==null) {
Log.d(TAG,"Node not found yet, waiting until one is found to send message");
try {
nodeFound.acquire();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Log.d(TAG,"Sending message to nodeID: "+ nodeID);
MessageApi.SendMessageResult result = Wearable.MessageApi.sendMessage(
googleApiClient, nodeID, path, message.getBytes()).await();
if (!result.getStatus().isSuccess()) {
return false;
}
return true;
}
@Override
protected void onPostExecute(Boolean messageResponse) {
if (messageResponse) {
Log.d(TAG, "Message " + path + " : " + message + " sent successfully");
} else {
showErrorAndCloseApp("Could not send message "+path+" : "+message,false);
}
super.onPostExecute(messageResponse);
}
}
//********************************************************************
//Receive Messages
//********************************************************************
public void startMessageListener(Context context, String path) {
messageReceived = (MessageListener) context;
this.receiverPath = path;
Wearable.MessageApi.addListener(googleApiClient, this);
}
public interface MessageListener {
void messageReceived(String path, String message);
}
@Override
public void onMessageReceived(final MessageEvent messageEvent) {
if (messageEvent.getPath().equals(receiverPath)) {
((Activity)context).runOnUiThread(new Runnable() {
@Override
public void run() {
String data = null;
try {
data = new String(messageEvent.getData(),"UTF-8");
messageReceived.messageReceived(messageEvent.getPath(), data);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
}
});
}
}
//********************************************************************
//Update Data
//********************************************************************
public void updateDataItem(final String path, final String key, final Object object) {
new Thread(new Runnable() {
@Override
public void run() {
Log.d(TAG,"Updating data item: "+key+" "+object.toString());
Gson gson = new Gson();
String jsonData = gson.toJson(object);
if (!path.startsWith("/")) {
Log.e(TAG,"Path "+path+" must start with a /!");
return;
}
PutDataMapRequest putDataMapRequest = PutDataMapRequest.create(path);
putDataMapRequest.getDataMap().putString(key, jsonData);
PutDataRequest putDataRequest = putDataMapRequest.asPutDataRequest();
PendingResult<DataApi.DataItemResult> pendingResult = Wearable.DataApi.putDataItem(googleApiClient, putDataRequest);
DataApi.DataItemResult result = pendingResult.await();
Log.d(TAG,result.toString());
Log.d(TAG, "Update data item result: " + result.getStatus().getStatusMessage());
}
}).start();
}
//********************************************************************
//Receive Data Changes and set keytypes
//********************************************************************
public void startDataListener(Context context, String path) {
Log.d(TAG,"Starting data listener");
dataChanged = (DataListener) context;
dataPath = path;
Wearable.DataApi.addListener(googleApiClient,this);
keyTypes = new HashMap<>();
}
public interface DataListener {
void dataChanged(String key, Object data);
}
@Override
public void onDataChanged(final DataEventBuffer dataEventBuffer) {
Log.d(TAG,"Data change event received");
for (DataEvent dataEvent : dataEventBuffer) {
DataItem dataItem = dataEvent.getDataItem();
if (dataItem.getUri().getPath().compareTo(dataPath) == 0) {
DataMap dataMap = DataMapItem.fromDataItem(dataItem).getDataMap();
Set<String> keys = dataMap.keySet();
for (String key : keys) {
String data = dataMap.getString(key);
Gson gson = new Gson();
if (!keyTypes.containsKey(key)) {
Log.e(TAG,key+" key not associated to a datatype, please setKeyDataType");
return;
}
Object object = gson.fromJson(data, keyTypes.get(key));
dataChanged.dataChanged(key, object);
}
}
}
}
private HashMap<String,Type> keyTypes;
public void setKeyDataType(String key, Type type) {
keyTypes.put(key, type);
}
//********************************************************************
//Error Listener
//********************************************************************
public interface onErrorListener {
void onError(Throwable throwable);
}
}
|
Run throwable on UI thread
|
wear-socket/src/main/java/com/github/jrejaud/wear_socket/WearSocket.java
|
Run throwable on UI thread
|
|
Java
|
mit
|
90b04582ba1ad028f2fd0d0d613b52011e26d90f
| 0
|
hpautonomy/find,hpe-idol/find,hpautonomy/find,hpe-idol/java-powerpoint-report,hpautonomy/find,hpe-idol/find,hpe-idol/find,hpautonomy/find,hpe-idol/find,hpautonomy/find,hpe-idol/find,hpe-idol/java-powerpoint-report
|
package com.autonomy.abc.selenium.page;
import com.autonomy.abc.selenium.AppElement;
import com.autonomy.abc.selenium.element.ModalView;
import com.autonomy.abc.selenium.menubar.NavBarTabId;
import com.autonomy.abc.selenium.menubar.SideNavBar;
import com.autonomy.abc.selenium.menubar.TopNavBar;
import com.autonomy.abc.selenium.util.AbstractMainPagePlaceholder;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
/**
 * Page object for the user-management ("users") page: creating users via the
 * create-user modal, and editing/deleting them through the current-admins table.
 */
public class UsersPage extends AppElement implements AppPage {

    public UsersPage(final TopNavBar topNavBar, final WebElement $el) {
        super($el, topNavBar.getDriver());
    }

    @Override
    public void navigateToPage() {
        getDriver().get("users");
    }

    /** Finds a single element inside the currently visible modal dialog. */
    private WebElement modalElement(final By locator) {
        return ModalView.getVisibleModalView(getDriver()).findElement(locator);
    }

    public WebElement createUserButton() {
        return findElement(By.cssSelector("#create-user"));
    }

    public WebElement createButton() {
        return modalElement(By.xpath(".//button[contains(text(), 'Create')]"));
    }

    /** Clears the username field in the create-user modal and types the given name. */
    public void addUsername(final String userName) {
        // Look the input up once: avoids a redundant modal search and a window
        // for a stale reference between clear() and sendKeys().
        final WebElement usernameElement = modalElement(By.cssSelector("[name='create-users-username']"));
        usernameElement.clear();
        usernameElement.sendKeys(userName);
    }

    public void clearPasswords() {
        modalElement(By.cssSelector("[name='create-users-password']")).clear();
        modalElement(By.cssSelector("[name='create-users-passwordConfirm']")).clear();
    }

    /** Types the password and its confirmation into the create-user modal. */
    public void addAndConfirmPassword(final String password, final String passwordConfirm) {
        final WebElement passwordElement = modalElement(By.cssSelector("[name='create-users-password']"));
        passwordElement.clear();
        passwordElement.sendKeys(password);
        final WebElement passwordConfirmElement = modalElement(By.cssSelector("[name='create-users-passwordConfirm']"));
        passwordConfirmElement.clear();
        passwordConfirmElement.sendKeys(passwordConfirm);
    }

    /** Fills in the create-user modal (name, password, role) and submits it. */
    public void createNewUser(final String userName, final String password, final String userLevel) {
        addUsername(userName);
        addAndConfirmPassword(password, password);
        modalElement(By.xpath(".//option[text() = '" + userLevel + "']")).click();
        createButton().click();
        loadOrFadeWait();
    }

    public void closeModal() {
        modalElement(By.cssSelector("[data-dismiss='modal']")).click();
        loadOrFadeWait();
    }

    /** Deletes every user whose delete button is enabled (everyone but the signed-in user). */
    public void deleteOtherUsers() {
        for (final WebElement deleteButton : getTable().findElements(By.cssSelector("button"))) {
            if (!isAttributePresent(deleteButton, "disabled")) {
                loadOrFadeWait();
                deleteButton.click();
                loadOrFadeWait();
                findElement(By.cssSelector(".popover-content .users-delete-confirm")).click();
            }
        }
    }

    public int countNumberOfUsers() {
        return getTable().findElements(By.cssSelector("tbody tr")).size();
    }

    /** Deletes the named user, confirming through the delete popover. */
    public void deleteUser(final String userName) {
        loadOrFadeWait();
        deleteButton(userName).click();
        loadOrFadeWait();
        findElement(By.cssSelector(".popover-content .users-delete-confirm")).click();
    }

    public WebElement deleteButton(final String userName) {
        return getUserRow(userName).findElement(By.cssSelector("button"));
    }

    public WebElement getTable() {
        return findElement(By.cssSelector("#users-current-admins"));
    }

    public WebElement getTableUserTypeLink(final String userName) {
        return getUserRow(userName).findElement(By.cssSelector(".role"));
    }

    public void selectTableUserType(final String userName, final String type) {
        getUserRow(userName).findElement(By.cssSelector(".input-admin")).findElement(By.xpath(".//*[text() = '" + type + "']")).click();
    }

    public WebElement getTableUserPasswordLink(final String userName) {
        return getUserRow(userName).findElement(By.cssSelector(".pw"));
    }

    public WebElement getTableUserPasswordBox(final String userName) {
        return getUserRow(userName).findElement(By.cssSelector("[type='password']"));
    }

    /** Locates the table row containing the given user name. */
    public WebElement getUserRow(final String userName) {
        return findElement(By.xpath(".//span[contains(text(), '" + userName + "')]/../../.."));
    }

    /** Opens the inline password editor for the user and submits a new password. */
    public void changePassword(final String userName, final String newPassword) {
        getTableUserPasswordLink(userName).click();
        // Re-use one reference instead of performing three identical row/box lookups.
        final WebElement passwordBox = getTableUserPasswordBox(userName);
        passwordBox.clear();
        passwordBox.sendKeys(newPassword);
        getUserRow(userName).findElement(By.cssSelector(".editable-submit")).click();
    }

    public String getSignedInUserName() {
        return findElement(By.cssSelector(".profile-element strong")).getText();
    }

    /** Lazy placeholder used by the app shell to construct the page on demand. */
    public static class Placeholder extends AbstractMainPagePlaceholder<UsersPage> {

        public Placeholder(final AppBody body, final SideNavBar mainTabBar, final TopNavBar topNavBar) {
            super(body, mainTabBar, topNavBar, "users", NavBarTabId.USERS_PAGE, false);
        }

        @Override
        protected UsersPage convertToActualType(final WebElement element) {
            return new UsersPage(topNavBar, element);
        }
    }
}
|
src/main/java/com/autonomy/abc/selenium/page/UsersPage.java
|
package com.autonomy.abc.selenium.page;
import com.autonomy.abc.selenium.AppElement;
import com.autonomy.abc.selenium.element.ModalView;
import com.autonomy.abc.selenium.menubar.NavBarTabId;
import com.autonomy.abc.selenium.menubar.SideNavBar;
import com.autonomy.abc.selenium.menubar.TopNavBar;
import com.autonomy.abc.selenium.util.AbstractMainPagePlaceholder;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
public class UsersPage extends AppElement implements AppPage {
public UsersPage(final TopNavBar topNavBar, final WebElement $el) {
super($el, topNavBar.getDriver());
}
@Override
public void navigateToPage() { getDriver().get("users"); }
public WebElement createUserButton() {
return findElement(By.cssSelector("#create-user"));
}
public WebElement createButton() {
return ModalView.getVisibleModalView(getDriver()).findElement(By.xpath(".//button[contains(text(), 'Create')]"));
}
public void addUsername(final String userName) {
ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-username']")).clear();
ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-username']")).sendKeys(userName);
}
public void clearPasswords() {
ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-password']")).clear();
ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-passwordConfirm']")).clear();
}
public void addAndConfirmPassword(final String password, final String passwordConfirm) {
final WebElement passwordElement = ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-password']"));
passwordElement.clear();
passwordElement.sendKeys(password);
final WebElement passwordConfirmElement = ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[name='create-users-passwordConfirm']"));
passwordConfirmElement.clear();
passwordConfirmElement.sendKeys(passwordConfirm);
}
public void createNewUser(final String userName, final String password, final String userLevel) {
addUsername(userName);
addAndConfirmPassword(password, password);
ModalView.getVisibleModalView(getDriver()).findElement(By.xpath(".//option[text() = '" + userLevel + "']")).click();
createButton().click();
loadOrFadeWait();
}
public void closeModal() {
ModalView.getVisibleModalView(getDriver()).findElement(By.cssSelector("[data-dismiss='modal']")).click();
loadOrFadeWait();
}
/**
 * Deletes every user whose delete button is enabled (i.e. all users except
 * the protected/signed-in one) and confirms each deletion in the popover.
 */
public void deleteOtherUsers() {
    // NOTE(review): the button list is fetched once up front; removing rows
    // may leave later WebElements stale -- confirm this is safe in practice.
    for (final WebElement deleteButton : getTable().findElements(By.cssSelector("button"))) {
        if (!deleteButton.getAttribute("class").contains("disabled")) {
            loadOrFadeWait();
            deleteButton.click();
            loadOrFadeWait();
            findElement(By.cssSelector(".popover-content .users-delete-confirm")).click();
        }
    }
}
public int countNumberOfUsers() {
return getTable().findElements(By.cssSelector("tbody tr")).size();
}
public void deleteUser(final String userName) {
loadOrFadeWait();
deleteButton(userName).click();
loadOrFadeWait();
findElement(By.cssSelector(".popover-content .users-delete-confirm")).click();
}
public WebElement deleteButton(final String userName) {
return getUserRow(userName).findElement(By.cssSelector("button"));
}
public WebElement getTable() {
return findElement(By.cssSelector("#users-current-admins"));
}
public WebElement getTableUserTypeLink(final String userName) {
return getUserRow(userName).findElement(By.cssSelector(".role"));
}
public void selectTableUserType(final String userName, final String type) {
getUserRow(userName).findElement(By.cssSelector(".input-admin")).findElement(By.xpath(".//*[text() = '" + type + "']")).click();
}
public WebElement getTableUserPasswordLink(final String userName) {
return getUserRow(userName).findElement(By.cssSelector(".pw"));
}
public WebElement getTableUserPasswordBox(final String userName) {
return getUserRow(userName).findElement(By.cssSelector("[type='password']"));
}
public WebElement getUserRow(final String userName) {
return findElement(By.xpath(".//span[contains(text(), '" + userName + "')]/../../.."));
}
public void changePassword(final String userName, final String newPassword) {
getTableUserPasswordLink(userName).click();
getTableUserPasswordBox(userName).clear();
getTableUserPasswordBox(userName).sendKeys(newPassword);
getUserRow(userName).findElement(By.cssSelector(".editable-submit")).click();
}
public String getSignedInUserName() {
return findElement(By.cssSelector(".profile-element strong")).getText();
}
public static class Placeholder extends AbstractMainPagePlaceholder<UsersPage> {
public Placeholder(final AppBody body, final SideNavBar mainTabBar, final TopNavBar topNavBar) {
super(body, mainTabBar, topNavBar, "users", NavBarTabId.USERS_PAGE, false);
}
@Override
protected UsersPage convertToActualType(final WebElement element) {
return new UsersPage(topNavBar, element);
}
}
}
|
[abc] more user type none tests [rev:matthew.gordon2]
[git-p4: depot-paths = "//depot/products/frontend/abc/master/": change = 1199375]
|
src/main/java/com/autonomy/abc/selenium/page/UsersPage.java
|
[abc] more user type none tests [rev:matthew.gordon2]
|
|
Java
|
mit
|
723f589e76f229beb5f593097b743b68e0909396
| 0
|
dhh1128/bibifi,dhh1128/bibifi,dhh1128/bibifi
|
package org.builditbreakit.seada.logappend;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import org.builditbreakit.seada.common.TransitionEvent;
import org.builditbreakit.seada.common.data.VisitorType;
import org.junit.Test;
public class AppendCommandOracleTest {
private static long NO_ROOM = -1;
private AppendCommand cmd;
@Test
public void testEmployeeArriveBuilding() {
testCommand("-T 1 -K secret -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testArgsAfterLog() {
testCommand("-K secret -A log1 -E Fred -T 1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testDuplicateRoom() {
testCommand("-T 1 -K secret -R 10 -A -E Fred -R 5 log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", 5, "log1");
}
@Test
public void testDuplicateTimes() {
testCommand("-T 1 -T 2 -K secret -A -E Fred log1");
assertCommand(2, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTripleTimesInOrder() {
testCommand("-T 1 -T 2 -K secret -A -E Fred -T 3 log1");
assertCommand(3, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTripleTimesOutOfOrder() {
testCommand("-T 3 -T 1 -K secret -A -E Fred -T 2 log1");
assertCommand(2, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testDuplicateToken() {
testCommand("-T 1 -K thing1 -K thing2 -A -E Fred log1");
assertCommand(1, "thing2", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test(expected = Exception.class)
public void testMultipleEventsArrivalAndDeparture() {
testCommand("-T 1 -K secret -A -L -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testMultipleEventsArrival() {
testCommand("-T 1 -K secret -A -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testMultipleEventsDeparture() {
testCommand("-T 1 -K secret -L -L -E Fred log1");
assertCommand(1, "secret", TransitionEvent.DEPARTURE,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testArrivalLast() {
testCommand("-T 1 -K secret log1 -E Fred -A");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test(expected = Exception.class)
public void testIncompleteCommand() {
testCommand("-T 1 -K secret log1 -E Fred");
}
@Test(expected = Exception.class)
public void testBadArg() {
testCommand("-T 1 -K secret log1 -Z -A -E Fred");
}
@Test(expected = Exception.class)
public void testBatchNotAcceptedInCommand() {
testCommand("-T 1 -K secret log1 -B -A -E Fred");
}
@Test(expected = Exception.class)
public void testBatchNotAcceptedByItself() {
testCommand("-B batch");
}
@Test(expected = Exception.class)
public void testMultipeLogs() {
testCommand("-T 1 -K secret log1 -A log2 -E Fred");
}
@Test(expected = Exception.class)
public void testMultipeLogsAtEnd() {
testCommand("-T 1 -K secret -A -E Fred log1 log2");
}
@Test
public void testDepartureLast() {
testCommand("-T 1 -K secret log1 -E Fred -L");
assertCommand(1, "secret", TransitionEvent.DEPARTURE,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTwoEmployees() {
testCommand("-T 1 -K secret -A -E Fred -E Jill log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Jill", "log1");
}
@Test(expected = Exception.class)
public void testOneEmployeeAndOneGuest() {
testCommand("-T 1 -K secret -A -E Fred -G Jill log1");
}
@Test(expected = Exception.class)
public void testOneGuestAndOneEmployee() {
testCommand("-T 1 -K secret -A -G Fred -E Jill log1");
}
@Test
public void testTwoGuests() {
testCommand("-T 1 -K secret -A -G Fred -G Jill log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL, VisitorType.GUEST,
"Jill", "log1");
}
@Test(expected = Exception.class)
public void testDuplicateKeySwitch() {
testCommand("-T 1 -K -K thing2 -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testBadTime() {
testCommand("-T a -K secret -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testDuplicateTimesWithOneInvalid1() {
testCommand("-T a -T 1 -K secret -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testDuplicateTimesWithOneInvalid2() {
testCommand("-T 1 -T a -K secret -A -E Fred log1");
}
@Test
public void fuzzyArgumentOrderTest() {
final Random rand = new Random();
boolean useRoom = rand.nextBoolean();
long room = (useRoom) ? 101 : NO_ROOM;
List<String> args = new LinkedList<>(Arrays.asList("-T 1", "-K secret",
"-A", "-E Fred", "log1"));
if (useRoom) {
args.add("-R " + room);
}
Collections.shuffle(args);
StringBuilder builder = new StringBuilder();
args.forEach((arg) -> builder.append(arg).append(" "));
String commandString = builder.toString();
try {
testCommand(commandString);
assertCommand(commandString, 1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", room, "log1");
} catch (AssertionError | RuntimeException e) {
System.out.println("Fail on " + commandString);
throw e;
}
}
private void assertCommand(String message, long expectedTime,
String expectedToken, TransitionEvent expectedEvent,
VisitorType expectedVisitorType, String expectedName,
String expectedLogFile) {
assertCommand(message, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, NO_ROOM, expectedLogFile);
}
private void assertCommand(String message, long expectedTime,
String expectedToken, TransitionEvent expectedEvent,
VisitorType expectedVisitorType, String expectedName,
long expectedRoom, String expectedLogFile) {
if (message != null && !message.isEmpty()) {
message += " | ";
} else {
message = "";
}
assertEquals(message + "Time", expectedTime, cmd.getTimestamp());
assertEquals(message + "Token", expectedToken, cmd.getToken());
assertEquals(message + "Event", expectedEvent, cmd.getEvent());
assertEquals(message + "Visitor Type", expectedVisitorType,
cmd.getVisitorType());
assertEquals(message + "Visitor Name", expectedName,
cmd.getVisitorName());
assertEquals(message + "Room", expectedRoom, cmd.getRoom());
assertEquals(message + "Log File", expectedLogFile, cmd.getLogfile());
}
private void assertCommand(long expectedTime, String expectedToken,
TransitionEvent expectedEvent, VisitorType expectedVisitorType,
String expectedName, String expectedLogFile) {
assertCommand(null, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, expectedLogFile);
}
private void assertCommand(long expectedTime, String expectedToken,
TransitionEvent expectedEvent, VisitorType expectedVisitorType,
String expectedName, long room, String expectedLogFile) {
assertCommand(null, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, room, expectedLogFile);
}
private void testCommand(String command) {
cmd = new AppendCommand(tokenize(command));
}
private static String[] tokenize(String str) {
return str.split("\\s+");
}
}
|
build/test/org/builditbreakit/seada/logappend/AppendCommandOracleTest.java
|
package org.builditbreakit.seada.logappend;
import static org.junit.Assert.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import org.builditbreakit.seada.common.TransitionEvent;
import org.builditbreakit.seada.common.data.VisitorType;
import org.junit.Test;
public class AppendCommandOracleTest {
private static long NO_ROOM = -1;
private AppendCommand cmd;
@Test
public void testEmployeeArriveBuilding() {
testCommand("-T 1 -K secret -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testArgsAfterLog() {
testCommand("-K secret -A log1 -E Fred -T 1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testDuplicateRoom() {
testCommand("-T 1 -K secret -R 10 -A -E Fred -R 5 log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", 5, "log1");
}
@Test
public void testDuplicateTimes() {
testCommand("-T 1 -T 2 -K secret -A -E Fred log1");
assertCommand(2, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTripleTimesInOrder() {
testCommand("-T 1 -T 2 -K secret -A -E Fred -T 3 log1");
assertCommand(3, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTripleTimesOutOfOrder() {
testCommand("-T 3 -T 1 -K secret -A -E Fred -T 2 log1");
assertCommand(2, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTripleDuplicateToken() {
testCommand("-T 1 -K thing1 -K thing2 -A -E Fred log1");
assertCommand(1, "thing2", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test(expected = Exception.class)
public void testMultipleEventsArrivalAndDeparture() {
testCommand("-T 1 -K secret -A -L -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testMultipleEventsArrival() {
testCommand("-T 1 -K secret -A -A -E Fred log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testMultipleEventsDeparture() {
    // Repeated -L flags must be accepted and yield a DEPARTURE event.
    // (This test had been clobbered by a bad merge into a copy of the
    // arrival test: it used -A and asserted ARRIVAL.)
    testCommand("-T 1 -K secret -L -L -E Fred log1");
    assertCommand(1, "secret", TransitionEvent.DEPARTURE,
            VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testArrivalLast() {
testCommand("-T 1 -K secret log1 -E Fred -A");
assertCommand(1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test(expected = Exception.class)
public void testIncompleteCommand() {
testCommand("-T 1 -K secret log1 -E Fred");
}
@Test(expected = Exception.class)
public void testBadArg() {
testCommand("-T 1 -K secret log1 -Z -A -E Fred");
}
@Test(expected = Exception.class)
public void testBatchNotAcceptedInCommand() {
testCommand("-T 1 -K secret log1 -B -A -E Fred");
}
@Test(expected = Exception.class)
public void testBatchNotAcceptedByItself() {
testCommand("-B batch");
}
@Test(expected = Exception.class)
public void testMultipeLogs() {
testCommand("-T 1 -K secret log1 -A log2 -E Fred");
}
@Test(expected = Exception.class)
public void testMultipeLogsAtEnd() {
testCommand("-T 1 -K secret -A -E Fred log1 log2");
}
@Test
public void testDepartureLast() {
testCommand("-T 1 -K secret log1 -E Fred -L");
assertCommand(1, "secret", TransitionEvent.DEPARTURE,
VisitorType.EMPLOYEE, "Fred", "log1");
}
@Test
public void testTwoEmployees() {
    // When -E is given twice the last name supplied wins.
    // The @Test annotation was lost in a merge, so this test silently
    // never ran; restore it so JUnit picks the method up.
    testCommand("-T 1 -K secret -A -E Fred -E Jill log1");
    assertCommand(1, "secret", TransitionEvent.ARRIVAL,
            VisitorType.EMPLOYEE, "Jill", "log1");
}
@Test(expected = Exception.class)
public void testOneEmployeeAndOneGuest() {
testCommand("-T 1 -K secret -A -E Fred -G Jill log1");
}
@Test(expected = Exception.class)
public void testOneGuestAndOneEmployee() {
testCommand("-T 1 -K secret -A -G Fred -E Jill log1");
}
@Test
public void testTwoGuests() {
testCommand("-T 1 -K secret -A -G Fred -G Jill log1");
assertCommand(1, "secret", TransitionEvent.ARRIVAL, VisitorType.GUEST,
"Jill", "log1");
}
@Test(expected = Exception.class)
public void testDuplicateKeySwitch() {
testCommand("-T 1 -K -K thing2 -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testBadTime() {
testCommand("-T a -K secret -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testDuplicateTimesWithOneInvalid1() {
testCommand("-T a -T 1 -K secret -A -E Fred log1");
}
@Test(expected = Exception.class)
public void testDuplicateTimesWithOneInvalid2() {
testCommand("-T 1 -T a -K secret -A -E Fred log1");
}
@Test
public void fuzzyArgumentOrderTest() {
final Random rand = new Random();
boolean useRoom = rand.nextBoolean();
long room = (useRoom) ? 101 : NO_ROOM;
List<String> args = new LinkedList<>(Arrays.asList("-T 1", "-K secret",
"-A", "-E Fred", "log1"));
if (useRoom) {
args.add("-R " + room);
}
Collections.shuffle(args);
StringBuilder builder = new StringBuilder();
args.forEach((arg) -> builder.append(arg).append(" "));
String commandString = builder.toString();
try {
testCommand(commandString);
assertCommand(commandString, 1, "secret", TransitionEvent.ARRIVAL,
VisitorType.EMPLOYEE, "Fred", room, "log1");
} catch (AssertionError | RuntimeException e) {
System.out.println("Fail on " + commandString);
throw e;
}
}
private void assertCommand(String message, long expectedTime,
String expectedToken, TransitionEvent expectedEvent,
VisitorType expectedVisitorType, String expectedName,
String expectedLogFile) {
assertCommand(message, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, NO_ROOM, expectedLogFile);
}
private void assertCommand(String message, long expectedTime,
String expectedToken, TransitionEvent expectedEvent,
VisitorType expectedVisitorType, String expectedName,
long expectedRoom, String expectedLogFile) {
if (message != null && !message.isEmpty()) {
message += " | ";
} else {
message = "";
}
assertEquals(message + "Time", expectedTime, cmd.getTimestamp());
assertEquals(message + "Token", expectedToken, cmd.getToken());
assertEquals(message + "Event", expectedEvent, cmd.getEvent());
assertEquals(message + "Visitor Type", expectedVisitorType,
cmd.getVisitorType());
assertEquals(message + "Visitor Name", expectedName,
cmd.getVisitorName());
assertEquals(message + "Room", expectedRoom, cmd.getRoom());
assertEquals(message + "Log File", expectedLogFile, cmd.getLogfile());
}
private void assertCommand(long expectedTime, String expectedToken,
TransitionEvent expectedEvent, VisitorType expectedVisitorType,
String expectedName, String expectedLogFile) {
assertCommand(null, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, expectedLogFile);
}
private void assertCommand(long expectedTime, String expectedToken,
TransitionEvent expectedEvent, VisitorType expectedVisitorType,
String expectedName, long room, String expectedLogFile) {
assertCommand(null, expectedTime, expectedToken, expectedEvent,
expectedVisitorType, expectedName, room, expectedLogFile);
}
private void testCommand(String command) {
cmd = new AppendCommand(tokenize(command));
}
private static String[] tokenize(String str) {
return str.split("\\s+");
}
}
|
Some tests got messed up on the merege. They are fixed now
|
build/test/org/builditbreakit/seada/logappend/AppendCommandOracleTest.java
|
Some tests got messed up on the merege. They are fixed now
|
|
Java
|
mit
|
8d2539be4468e513b42615ae595b7b13c0b4c7f8
| 0
|
adamIqbal/Health
|
package com.health.input;
import java.util.List;
import com.health.Record;
import com.health.Table;
/**
 * A class for functions which can be called on all input formats.
 */
public class InputFunctions {
/**
* Deletes the last x lines which are not needed as specified by the user.
*
* @param table
* Gets the table with the redundant lines.
* @param config
* Gets the InputDescriptor which is used to create the original file.
* @return the new table with deleted lines.
*/
/**
 * Deletes the trailing records of the table that are not needed, as
 * specified by the user via the descriptor's ignoreLast setting.
 *
 * @param table  the table containing the redundant trailing records
 * @param config the InputDescriptor used to create the original file;
 *               supplies the number of trailing records to drop
 * @return the same table instance with the trailing records removed
 */
public static Table deleteLastLines(final Table table, final InputDescriptor config) {
    int deletions = config.getIgnoreLast();
    List<Record> records = table.getRecords();
    // The last valid index is size() - 1; the original code indexed at
    // size(), which always threw IndexOutOfBoundsException on the first
    // removal. Also stop at 0 so ignoreLast > size cannot underflow.
    int index = table.size() - 1;
    while (deletions > 0 && index >= 0) {
        table.removeRecord(records.get(index));
        index--;
        deletions--;
    }
    return table;
}
}
|
health/src/main/java/com/health/input/InputFunctions.java
|
package com.health.input;
import java.util.List;
import com.health.Record;
import com.health.Table;
/**
* A class for functions with can be called on all input formats.
*
*/
public class InputFunctions {
/**
* Deletes the last x lines which are not needed as specified by the user.
*
* @param table
* Gets the table with the redundant lines.
* @param config
* Gets the InputDescriptor which is used to create the original file.
* @return the new table with deleted lines.
*/
/**
 * Deletes the trailing records of the table that are not needed, as
 * specified by the user via the descriptor's ignoreLast setting.
 *
 * @param table  the table containing the redundant trailing records
 * @param config the InputDescriptor used to create the original file;
 *               supplies the number of trailing records to drop
 * @return the same table instance with the trailing records removed
 */
public static Table deleteLastLines(final Table table, final InputDescriptor config) {
    int deletions = config.getIgnoreLast();
    List<Record> records = table.getRecords();
    // The last valid index is size() - 1; the original code indexed at
    // size(), which always threw IndexOutOfBoundsException on the first
    // removal. Also stop at 0 so ignoreLast > size cannot underflow.
    int index = table.size() - 1;
    while (deletions > 0 && index >= 0) {
        table.removeRecord(records.get(index));
        index--;
        deletions--;
    }
    return table;
}
}
|
made a special class for input functions as ignore last
|
health/src/main/java/com/health/input/InputFunctions.java
|
made a special class for input functions as ignore last
|
|
Java
|
mit
|
edd8ac2f0d091eef0258613cca7d4970b15e48ec
| 0
|
hazendaz/oshi,dbwiddis/oshi
|
/*
* MIT License
*
* Copyright (c) 2019-2022 The OSHI Project Contributors: https://github.com/oshi/oshi/graphs/contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oshi.software.common;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import oshi.annotation.concurrent.ThreadSafe;
import oshi.software.os.NetworkParams;
import oshi.util.FileUtil;
import oshi.util.ParseUtil;
/**
* Common NetworkParams implementation.
*/
@ThreadSafe
public abstract class AbstractNetworkParams implements NetworkParams {
private static final String NAMESERVER = "nameserver";
/**
 * Returns the fully qualified (canonical) host name of the local host.
 * Falls back to the loopback address when the local host name cannot be
 * resolved, instead of propagating the failure to the caller.
 *
 * @return the canonical host name of the local (or loopback) address
 */
@Override
public String getDomainName() {
    InetAddress localHost;
    try {
        localHost = InetAddress.getLocalHost();
    } catch (UnknownHostException e) {
        // Resolution failed; loopback is always available.
        localHost = InetAddress.getLoopbackAddress();
    }
    return localHost.getCanonicalHostName();
}
@Override
public String getHostName() {
InetAddress localHost;
try {
localHost = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
localHost = InetAddress.getLoopbackAddress();
}
String hn = localHost.getHostName();
int dot = hn.indexOf('.');
if (dot == -1) {
return hn;
}
return hn.substring(0, dot);
}
/**
 * Reads the configured DNS servers from {@code /etc/resolv.conf}.
 * At most three nameserver entries are returned; blank values and
 * commented-out values ('#' or ';') are skipped.
 *
 * @return an array of the configured nameserver addresses (at most 3)
 */
@Override
public String[] getDnsServers() {
    final int maxNameServer = 3;
    List<String> servers = new ArrayList<>();
    for (String line : FileUtil.readFile("/etc/resolv.conf")) {
        if (servers.size() >= maxNameServer) {
            break;
        }
        if (!line.startsWith(NAMESERVER)) {
            continue;
        }
        // Drop the "nameserver" keyword and any whitespace following it.
        String value = line.substring(NAMESERVER.length()).replaceFirst("^[ \t]+", "");
        // Skip empty values and comment lines.
        if (!value.isEmpty() && value.charAt(0) != '#' && value.charAt(0) != ';') {
            // Keep only the first token; trailing comments are discarded.
            servers.add(value.split("[ \t#;]", 2)[0]);
        }
    }
    return servers.toArray(new String[0]);
}
/**
 * Convenience method to parse the output of the {@code route} command.
 * While the command arguments vary between OS's, the output is
 * consistently parsable: the gateway appears on a line of the form
 * {@code "gateway: <address>"}.
 *
 * @param lines
 *            output of the OS-specific route command
 * @return the default gateway address, or an empty string if none is found
 */
protected static String searchGateway(List<String> lines) {
    for (String line : lines) {
        String leftTrimmed = line.replaceFirst("^\\s+", "");
        if (leftTrimmed.startsWith("gateway:")) {
            String[] split = ParseUtil.whitespaces.split(leftTrimmed);
            if (split.length < 2) {
                // "gateway:" with no address following it.
                return "";
            }
            // Strip any trailing zone index ("%...") from the address.
            return split[1].split("%")[0];
        }
    }
    return "";
}
@Override
public String toString() {
return String.format("Host name: %s, Domain name: %s, DNS servers: %s, IPv4 Gateway: %s, IPv6 Gateway: %s",
this.getHostName(), this.getDomainName(), Arrays.toString(this.getDnsServers()),
this.getIpv4DefaultGateway(), this.getIpv6DefaultGateway());
}
}
|
oshi-core/src/main/java/oshi/software/common/AbstractNetworkParams.java
|
/*
* MIT License
*
* Copyright (c) 2019-2021 The OSHI Project Contributors: https://github.com/oshi/oshi/graphs/contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package oshi.software.common;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import oshi.annotation.concurrent.ThreadSafe;
import oshi.software.os.NetworkParams;
import oshi.util.FileUtil;
import oshi.util.ParseUtil;
/**
* Common NetworkParams implementation.
*/
@ThreadSafe
public abstract class AbstractNetworkParams implements NetworkParams {
private static final Logger LOG = LoggerFactory.getLogger(AbstractNetworkParams.class);
private static final String NAMESERVER = "nameserver";
@Override
public String getDomainName() {
try {
return InetAddress.getLocalHost().getCanonicalHostName();
} catch (UnknownHostException e) {
LOG.error("Unknown host exception when getting address of local host: {}", e.getMessage());
return "";
}
}
@Override
public String getHostName() {
try {
String hn = InetAddress.getLocalHost().getHostName();
int dot = hn.indexOf('.');
if (dot == -1) {
return hn;
}
return hn.substring(0, dot);
} catch (UnknownHostException e) {
LOG.error("Unknown host exception when getting address of local host: {}", e.getMessage());
return "";
}
}
@Override
public String[] getDnsServers() {
List<String> resolv = FileUtil.readFile("/etc/resolv.conf");
String key = NAMESERVER;
int maxNameServer = 3;
List<String> servers = new ArrayList<>();
for (int i = 0; i < resolv.size() && servers.size() < maxNameServer; i++) {
String line = resolv.get(i);
if (line.startsWith(key)) {
String value = line.substring(key.length()).replaceFirst("^[ \t]+", "");
if (value.length() != 0 && value.charAt(0) != '#' && value.charAt(0) != ';') {
String val = value.split("[ \t#;]", 2)[0];
servers.add(val);
}
}
}
return servers.toArray(new String[0]);
}
/**
* Convenience method to parse the output of the `route` command. While the
* command arguments vary between OS's the output is consistently parsable.
*
* @param lines
* output of OS-specific route command
* @return default gateway
*/
protected static String searchGateway(List<String> lines) {
for (String line : lines) {
String leftTrimmed = line.replaceFirst("^\\s+", "");
if (leftTrimmed.startsWith("gateway:")) {
String[] split = ParseUtil.whitespaces.split(leftTrimmed);
if (split.length < 2) {
return "";
}
return split[1].split("%")[0];
}
}
return "";
}
@Override
public String toString() {
return String.format("Host name: %s, Domain name: %s, DNS servers: %s, IPv4 Gateway: %s, IPv6 Gateway: %s",
this.getHostName(), this.getDomainName(), Arrays.toString(this.getDnsServers()),
this.getIpv4DefaultGateway(), this.getIpv6DefaultGateway());
}
}
|
Improve localhost exception handling
|
oshi-core/src/main/java/oshi/software/common/AbstractNetworkParams.java
|
Improve localhost exception handling
|
|
Java
|
mit
|
4cb1f3d967510434702972df450a9594d668e0f0
| 0
|
ProPra16/programmierpraktikum-abschlussprojekt-pwnyhof,ProPra16/programmierpraktikum-abschlussprojekt-pwnyhof,ProPra16/programmierpraktikum-abschlussprojekt-pwnyhof,ProPra16/programmierpraktikum-abschlussprojekt-pwnyhof
|
package de.hhu.propra16.tddt.controller;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.geometry.Insets;
import javafx.scene.control.Button;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.paint.Color;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
public class MainScreenController {
Runtime rt = Runtime.getRuntime();
private Stage stage;
String commandLine = " ";
@FXML
public MenuItem neu, load, saveTest, saveCode, exit, catalog;
@FXML
public Button runTest, fieldClear, runCode, clear, nextTest;
@FXML
public Button nextCode, currentPhase;
@FXML
public TextArea leftTA, rightTA, console;
@FXML
public TextField commandField;
/**
 * Handles all menu item actions: new, catalog selection, load, save
 * test/code, and exit.
 *
 * @param e the action event fired by the selected menu item
 */
@FXML
public void handleMenuItem(ActionEvent e) {
    if (e.getSource() == neu) {
        leftTA.clear();
        rightTA.clear();
    }
    if (e.getSource() == catalog) {
        FileChooser fileChooser = new FileChooser();
        File initialDirectory = new File("./Task");
        fileChooser.setInitialDirectory(initialDirectory);
        FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("Java files (*.java)", "*.java");
        fileChooser.getExtensionFilters().add(extFilter);
        fileChooser.showOpenDialog(stage);
    }
    if (e.getSource() == load) {
        // try-with-resources guarantees the readers are closed even when
        // reading throws; the original leaked both readers on an exception
        // and swallowed the IOException silently.
        leftTA.setText("");
        try (BufferedReader codeLoad = new BufferedReader(new FileReader("./Task/Aufgabe1/Code.java"))) {
            String code;
            while ((code = codeLoad.readLine()) != null) {
                // Lines beginning with '#' are task metadata, not code.
                if (!code.startsWith("#")) {
                    leftTA.setText(leftTA.getText() + code + "\n");
                }
            }
        } catch (IOException ex) {
            Logger.getLogger(MainScreenController.class.getName()).log(Level.SEVERE, null, ex);
        }
        rightTA.setText("");
        try (BufferedReader testLoad = new BufferedReader(new FileReader("./Task/Aufgabe1/Try.java"))) {
            String test;
            while ((test = testLoad.readLine()) != null) {
                if (!test.startsWith("#")) {
                    rightTA.setText(rightTA.getText() + test + "\n");
                }
            }
        } catch (IOException ex) {
            Logger.getLogger(MainScreenController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    if (e.getSource() == saveTest) {
        // `new File(...)` never returns null, so the original null check was dead code.
        SaveFile(rightTA.getText(), new File("./Task/Aufgabe1/Try.java"));
    }
    if (e.getSource() == saveCode) {
        SaveFile(leftTA.getText(), new File("./Task/Aufgabe1/Code.java"));
    }
    if (e.getSource() == exit) {
        System.exit(0);
    }
}
@FXML
public void handleButton(ActionEvent e) throws IOException {
Console con = new Console(console);
PrintStream out = new PrintStream(con, true);
System.setOut(out);
if (e.getSource() == runCode) {
ConfigReader config = new ConfigReader("Aufgabe1");
Information info = new Information(config.getTestName(), config.getProgramName(),
"./Task/" + config.getTask() + "/");
Program program = new Program(info, console);
boolean codeTrue = program.compile();
if(codeTrue) {
try {
nextCode.setDisable(false);
} catch (NullPointerException e2) {
}
}
program.run(" " + commandField.getText());
}
if (e.getSource() == runTest) {
ConfigReader config = new ConfigReader("Aufgabe1");
Information info = new Information(config.getTestName(), config.getProgramName(),
"./Task/" + config.getTask() + "/");
Program program = new Program(info, console);
boolean testTrue = program.test();
if(testTrue) {
try {
nextTest.setDisable(false);
} catch (NullPointerException e2) {
}
}
}
if (e.getSource() == clear) {
console.clear();
}
if (e.getSource() == fieldClear) {
commandField.clear();
}
if(e.getSource() == nextTest){
runCode.setDisable(false);
leftTA.setDisable(false);
runTest.setDisable(true);
rightTA.setDisable(true);
nextCode.setDisable(true);
currentPhase.setBackground(new Background(new BackgroundFill(Color.BLUE, CornerRadii.EMPTY, Insets.EMPTY)));
}
if(e.getSource() == nextCode){
runCode.setDisable(true);
leftTA.setDisable(true);
runTest.setDisable(false);
rightTA.setDisable(false);
nextTest.setDisable(true);
currentPhase.setBackground(new Background(new BackgroundFill(Color.RED, CornerRadii.EMPTY, Insets.EMPTY)));
}
}
private void SaveFile(String content, File file) {
try {
FileWriter fileWriter;
fileWriter = new FileWriter(file);
fileWriter.write(content);
fileWriter.close();
} catch (IOException ex) {
Logger.getLogger(MainScreenController.class.getName()).log(Level.SEVERE, null, ex);
}
}
}
|
src/main/java/de/hhu/propra16/tddt/controller/MainScreenController.java
|
package de.hhu.propra16.tddt.controller;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.util.logging.Level;
import java.util.logging.Logger;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.geometry.Insets;
import javafx.scene.control.Button;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.CornerRadii;
import javafx.scene.paint.Color;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
public class MainScreenController {
    Runtime rt = Runtime.getRuntime();
    private Stage stage;
    String commandLine = " ";
    @FXML
    public MenuItem neu, load, saveTest, saveCode, exit, catalog;
    @FXML
    public Button runTest, fieldClear, runCode, clear, nextTest;
    @FXML
    public Button nextCode, currentPhase;
    @FXML
    public TextArea leftTA, rightTA, console;
    @FXML
    public TextField commandField;

    /**
     * Handles all menu actions: clearing the editors, choosing a task from
     * the catalog, loading and saving the code/test files, and exiting.
     *
     * @param e the action event fired by one of the menu items
     */
    @FXML
    public void handleMenuItem(ActionEvent e) {
        if (e.getSource() == neu) {
            leftTA.clear();
            rightTA.clear();
        }
        if (e.getSource() == catalog) {
            FileChooser fileChooser = new FileChooser();
            File initialDirectory = new File("./Task");
            fileChooser.setInitialDirectory(initialDirectory);
            FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("Java files (*.java)", "*.java");
            fileChooser.getExtensionFilters().add(extFilter);
            fileChooser.showOpenDialog(stage);
        }
        if (e.getSource() == load) {
            loadIntoTextArea(leftTA, "./Task/Aufgabe1/Code.java");
            loadIntoTextArea(rightTA, "./Task/Aufgabe1/Try.java");
        }
        if (e.getSource() == saveTest) {
            // new File(...) never returns null, so no null check is needed.
            SaveFile(rightTA.getText(), new File("./Task/Aufgabe1/Try.java"));
        }
        if (e.getSource() == saveCode) {
            SaveFile(leftTA.getText(), new File("./Task/Aufgabe1/Code.java"));
        }
        if (e.getSource() == exit) {
            System.exit(0);
        }
    }

    /**
     * Reads the file at the given path into the given text area, skipping
     * lines that start with '#'. The reader is closed via try-with-resources
     * and the text is built once instead of via repeated setText calls.
     * I/O failures are logged (previously they were silently swallowed).
     */
    private void loadIntoTextArea(TextArea target, String path) {
        target.setText("");
        StringBuilder content = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new FileReader(path))) {
            String line;
            while ((line = reader.readLine()) != null) {
                if (!line.startsWith("#")) {
                    content.append(line).append('\n');
                }
            }
            target.setText(content.toString());
        } catch (IOException ex) {
            Logger.getLogger(MainScreenController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Handles all button actions: compiling and running the code, running
     * the tests, clearing console/command field, and switching TDD phases.
     *
     * @param e the action event fired by one of the buttons
     * @throws IOException if the underlying compile/run infrastructure fails
     */
    @FXML
    public void handleButton(ActionEvent e) throws IOException {
        // Redirect System.out into the embedded console text area.
        Console con = new Console(console);
        PrintStream out = new PrintStream(con, true);
        System.setOut(out);
        if (e.getSource() == runCode) {
            Program program = createProgram();
            // Explicit null check instead of catching NullPointerException.
            if (program.compile() && nextCode != null) {
                nextCode.setDisable(false);
            }
            program.run(" " + commandField.getText());
        }
        if (e.getSource() == runTest) {
            Program program = createProgram();
            if (program.test() && nextTest != null) {
                nextTest.setDisable(false);
            }
        }
        if (e.getSource() == clear) {
            console.clear();
        }
        if (e.getSource() == fieldClear) {
            commandField.clear();
        }
        if (e.getSource() == nextTest) {
            // Switch to the implementation phase (blue indicator).
            runCode.setDisable(false);
            leftTA.setDisable(false);
            runTest.setDisable(true);
            rightTA.setDisable(true);
            nextCode.setDisable(true);
            currentPhase.setBackground(new Background(new BackgroundFill(Color.BLUE, CornerRadii.EMPTY, Insets.EMPTY)));
        }
        if (e.getSource() == nextCode) {
            // Switch to the test-writing phase (red indicator).
            runCode.setDisable(true);
            leftTA.setDisable(true);
            runTest.setDisable(false);
            rightTA.setDisable(false);
            nextTest.setDisable(true);
            currentPhase.setBackground(new Background(new BackgroundFill(Color.RED, CornerRadii.EMPTY, Insets.EMPTY)));
        }
    }

    /** Builds a Program for task "Aufgabe1" from its configuration file. */
    private Program createProgram() {
        ConfigReader config = new ConfigReader("Aufgabe1");
        Information info = new Information(config.getTestName(), config.getProgramName(),
                "./Task/" + config.getTask() + "/");
        return new Program(info, console);
    }

    /**
     * Writes the given content to the given file, logging any I/O failure.
     * The writer is closed via try-with-resources even on error.
     */
    private void SaveFile(String content, File file) {
        try (FileWriter fileWriter = new FileWriter(file)) {
            fileWriter.write(content);
        } catch (IOException ex) {
            Logger.getLogger(MainScreenController.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
|
whitespaces entfernt
|
src/main/java/de/hhu/propra16/tddt/controller/MainScreenController.java
|
whitespaces entfernt
|
|
Java
|
mit
|
091b93a03963973ddbe991eb364457a290c2ad3a
| 0
|
VisualDataWeb/OntoBench,VisualDataWeb/OntoBench,VisualDataWeb/OntoBench
|
package de.linkvt.bachelor.config;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLMutableOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.web.context.WebApplicationContext;
import javax.servlet.http.HttpServletRequest;
/**
* Creates beans for OWL API classes.
*/
@Configuration
public class OwlApiConfig {

    /** Supplies the shared OWL API data factory bean. */
    @Bean
    public OWLDataFactory dataFactory() {
        return OWLManager.getOWLDataFactory();
    }

    /**
     * Creates a fresh, request-scoped mutable ontology whose IRI is the
     * URL of the current HTTP request.
     */
    @Bean
    @Scope(value = WebApplicationContext.SCOPE_REQUEST, proxyMode = ScopedProxyMode.INTERFACES)
    public OWLMutableOntology owlOntology(HttpServletRequest request) throws OWLOntologyCreationException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        IRI ontologyIri = IRI.create(request.getRequestURL().toString());
        // Cast to a mutable ontology to pass OWLApi's strange checks
        return (OWLMutableOntology) manager.createOntology(ontologyIri);
    }
}
|
src/main/java/de/linkvt/bachelor/config/OwlApiConfig.java
|
package de.linkvt.bachelor.config;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLDataFactory;
import org.semanticweb.owlapi.model.OWLMutableOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import org.springframework.context.annotation.ScopedProxyMode;
import org.springframework.web.context.WebApplicationContext;
import javax.servlet.http.HttpServletRequest;
/**
* Creates beans for OWL API classes.
*/
@Configuration
public class OwlApiConfig {

    /** Supplies the shared OWL API data factory bean. */
    @Bean
    public OWLDataFactory dataFactory() {
        return OWLManager.getOWLDataFactory();
    }

    /**
     * Creates a fresh, request-scoped mutable ontology. Its IRI is derived
     * from the request URL via {@link #createOntologyUrl(HttpServletRequest)}.
     */
    @Bean
    @Scope(value = WebApplicationContext.SCOPE_REQUEST, proxyMode = ScopedProxyMode.INTERFACES)
    public OWLMutableOntology owlOntology(HttpServletRequest request) throws OWLOntologyCreationException {
        OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
        // Cast to a mutable ontology to pass OWLApi's strange checks
        return (OWLMutableOntology) manager.createOntology(IRI.create(createOntologyUrl(request)));
    }

    /**
     * Builds the ontology IRI string from the request: the file extension
     * is stripped from the URL and any query string is re-appended.
     */
    private String createOntologyUrl(HttpServletRequest request) {
        String base = FilenameUtils.removeExtension(request.getRequestURL().toString());
        String queryString = request.getQueryString();
        return StringUtils.isNotEmpty(queryString) ? base + "?" + queryString : base;
    }
}
|
Trim query string from ontology iri
|
src/main/java/de/linkvt/bachelor/config/OwlApiConfig.java
|
Trim query string from ontology iri
|
|
Java
|
mit
|
c99121650494fc15bced9f44236bc57db196d4d7
| 0
|
RUCD/apt-graph,RUCD/apt-graph,RUCD/apt-graph,RUCD/apt-graph
|
/*
* The MIT License
*
* Copyright 2016 Thibault Debatty.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package aptgraph.server;
import aptgraph.core.Request;
import info.debatty.java.graphs.Graph;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.HashMap;
import java.util.LinkedList;
import junit.framework.TestCase;
/**
*
* @author Thibault Debatty
*/
public class RequestHandlerTest extends TestCase {

    /**
     * Loads the serialized dummy user graphs shipped as a test resource and
     * builds a RequestHandler around them. Shared by all tests to avoid
     * duplicating the deserialization boilerplate; the stream is closed via
     * try-with-resources even when readObject fails.
     */
    @SuppressWarnings("unchecked")
    private RequestHandler createHandler() throws IOException, ClassNotFoundException {
        InputStream graph_stream =
                getClass().getResourceAsStream("/dummy_graph.ser");
        try (ObjectInputStream input = new ObjectInputStream(
                new BufferedInputStream(graph_stream))) {
            HashMap<String, LinkedList<Graph<Request>>> user_graphs =
                    (HashMap<String, LinkedList<Graph<Request>>>) input.readObject();
            return new RequestHandler(user_graphs);
        }
    }

    /**
     * Test of test method, of class RequestHandler.
     */
    public void testTest() throws IOException, ClassNotFoundException {
        System.out.println("test");
        createHandler().test();
    }

    /**
     * Test of dummy method, of class RequestHandler.
     */
    public void testDummy() throws IOException, ClassNotFoundException {
        System.out.println("dummy");
        createHandler().dummy();
    }

    /**
     * Test of analyze method, of class RequestHandler.
     */
    public void testAnalyze() throws IOException, ClassNotFoundException {
        System.out.println("analyze");
        createHandler().analyze("127.0.0.1", new double[]{0.7, 0.3},
                new double[]{0.8, 0.2}, 10.0, 10);
    }
}
|
server/src/test/java/aptgraph/server/RequestHandlerTest.java
|
/*
* The MIT License
*
* Copyright 2016 Thibault Debatty.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package aptgraph.server;
import aptgraph.core.Request;
import info.debatty.java.graphs.Graph;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.util.HashMap;
import java.util.LinkedList;
import junit.framework.TestCase;
/**
*
* @author Thibault Debatty
*/
public class RequestHandlerTest extends TestCase {

    /**
     * Loads the serialized dummy user graphs shipped as a test resource and
     * builds a RequestHandler around them. Shared by all tests to avoid
     * duplicating the deserialization boilerplate; the stream is closed via
     * try-with-resources even when readObject fails.
     */
    @SuppressWarnings("unchecked")
    private RequestHandler createHandler() throws IOException, ClassNotFoundException {
        InputStream graph_stream =
                getClass().getResourceAsStream("/dummy_graph.ser");
        try (ObjectInputStream input = new ObjectInputStream(
                new BufferedInputStream(graph_stream))) {
            HashMap<String, LinkedList<Graph<Request>>> user_graphs =
                    (HashMap<String, LinkedList<Graph<Request>>>) input.readObject();
            return new RequestHandler(user_graphs);
        }
    }

    /**
     * Test of test method, of class RequestHandler.
     */
    public void testTest() throws IOException, ClassNotFoundException {
        System.out.println("test");
        createHandler().test();
    }

    /**
     * Test of dummy method, of class RequestHandler.
     */
    public void testDummy() throws IOException, ClassNotFoundException {
        System.out.println("dummy");
        createHandler().dummy();
    }

    /**
     * Test of analyze method, of class RequestHandler.
     */
    public void testAnalyze() throws IOException, ClassNotFoundException {
        System.out.println("analyze");
        createHandler().analyze("127.0.0.1", new double[]{1.0, 1.0}, new double[]{1.0, 0.2}, 10.0, 10);
    }
}
|
Minor bug fix
|
server/src/test/java/aptgraph/server/RequestHandlerTest.java
|
Minor bug fix
|
|
Java
|
epl-1.0
|
61a03b3c815a2a8ee02b2e252ef205d428641873
| 0
|
bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui
|
package de.prob2.ui.internal;
import java.io.CharArrayWriter;
import java.io.PrintWriter;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.ResourceBundle;
import com.google.inject.Injector;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIconView;
import de.prob.animator.domainobjects.ErrorItem;
import de.prob.exception.ProBError;
import de.prob2.ui.menu.EditMenu;
import javafx.beans.binding.Bindings;
import javafx.beans.value.ObservableValue;
import javafx.fxml.FXML;
import javafx.geometry.Pos;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.OverrunStyle;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
public final class ExceptionAlert extends Alert {
    @FXML private VBox contentVBox;
    @FXML private Label label;
    @FXML private TableView<ErrorItem> proBErrorTable;
    @FXML private TableColumn<ErrorItem, ErrorItem> typeColumn;
    @FXML private TableColumn<ErrorItem, ErrorItem> messageColumn;
    @FXML private TableColumn<ErrorItem, ErrorItem> locationsColumn;
    @FXML private TextArea stackTraceTextArea;

    private final StageManager stageManager;
    private final ResourceBundle bundle;
    private final EditMenu editMenu;
    private final String text;
    private final Throwable exc;

    /**
     * Creates an alert describing the given exception. If a ProBError is
     * found anywhere in the cause chain, its original message and error
     * items are shown in addition to the stack trace.
     *
     * @param injector used to look up StageManager, ResourceBundle, EditMenu
     * @param text short description prefixed to the exception message
     * @param exc the exception to display; must not be null
     */
    public ExceptionAlert(final Injector injector, final String text, final Throwable exc) {
        super(Alert.AlertType.NONE); // Alert type is set in FXML
        Objects.requireNonNull(exc);
        this.stageManager = injector.getInstance(StageManager.class);
        this.bundle = injector.getInstance(ResourceBundle.class);
        this.editMenu = injector.getInstance(EditMenu.class);
        this.text = text;
        this.exc = exc;
        stageManager.loadFXML(this, "exception_alert.fxml");
    }

    @FXML
    private void initialize() {
        stageManager.register(this);
        // Walk the cause chain so wrapped ProBErrors are detected too.
        ProBError proBError = null;
        for (Throwable e = this.exc; e != null; e = e.getCause()) {
            if (e instanceof ProBError) {
                proBError = (ProBError)e;
                break;
            }
        }
        final String message;
        if (proBError != null) {
            message = proBError.getOriginalMessage();
        } else {
            message = this.exc.getMessage();
        }
        this.label.setText(this.text + ":\n" + message);
        // Every column's cell value is the row's ErrorItem itself; the cell
        // factories below decide which aspect of the item to render.
        final Callback<TableColumn.CellDataFeatures<ErrorItem, ErrorItem>, ObservableValue<ErrorItem>> cellValueFactory = features -> Bindings.createObjectBinding(features::getValue);
        this.typeColumn.setCellValueFactory(cellValueFactory);
        this.messageColumn.setCellValueFactory(cellValueFactory);
        this.locationsColumn.setCellValueFactory(cellValueFactory);
        this.typeColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setText(null);
                } else {
                    // Localized name for the error type, falling back to the raw enum name.
                    final String typeName;
                    switch (item.getType()) {
                        case WARNING:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.warning");
                            break;
                        case ERROR:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.error");
                            break;
                        case INTERNAL_ERROR:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.internalError");
                            break;
                        default:
                            typeName = item.getType().name();
                    }
                    this.setText(typeName);
                }
            }
        });
        this.messageColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setText(null);
                } else {
                    this.setText(item.getMessage());
                }
            }
        });
        // Returned directly instead of via a pointless intermediate variable.
        this.locationsColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setGraphic(null);
                } else {
                    // One row per error location: an edit button plus the location text.
                    final VBox vbox = new VBox();
                    for (final ErrorItem.Location location : item.getLocations()) {
                        final Button openLocationButton = new Button(null, new FontAwesomeIconView(FontAwesomeIcon.PENCIL));
                        openLocationButton.setOnAction(event -> openLocationInEditor(location));
                        // Renamed from "label" to avoid shadowing the class field.
                        final Label locationLabel = new Label(location.toString());
                        locationLabel.setTextOverrun(OverrunStyle.LEADING_ELLIPSIS);
                        final HBox hbox = new HBox(openLocationButton, locationLabel);
                        HBox.setHgrow(openLocationButton, Priority.NEVER);
                        HBox.setHgrow(locationLabel, Priority.ALWAYS);
                        hbox.setAlignment(Pos.CENTER_LEFT);
                        vbox.getChildren().add(hbox);
                    }
                    this.setGraphic(vbox);
                }
            }
        });
        try (final CharArrayWriter caw = new CharArrayWriter(); final PrintWriter pw = new PrintWriter(caw)) {
            exc.printStackTrace(pw);
            this.stackTraceTextArea.setText(caw.toString());
        }
        if (proBError != null && proBError.getErrors() != null) {
            this.proBErrorTable.getItems().setAll(proBError.getErrors());
        } else {
            // No structured ProB errors available - hide the table entirely.
            this.contentVBox.getChildren().remove(this.proBErrorTable);
        }
    }

    private void openLocationInEditor(final ErrorItem.Location location) {
        // TODO Jump to error location in file
        editMenu.showEditorStage(Paths.get(location.getFilename()));
    }
}
|
src/main/java/de/prob2/ui/internal/ExceptionAlert.java
|
package de.prob2.ui.internal;
import java.io.CharArrayWriter;
import java.io.PrintWriter;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.ResourceBundle;
import com.google.inject.Injector;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIcon;
import de.jensd.fx.glyphs.fontawesome.FontAwesomeIconView;
import de.prob.animator.domainobjects.ErrorItem;
import de.prob.exception.ProBError;
import de.prob2.ui.menu.EditMenu;
import javafx.beans.binding.Bindings;
import javafx.beans.value.ObservableValue;
import javafx.fxml.FXML;
import javafx.geometry.Pos;
import javafx.scene.control.Alert;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.OverrunStyle;
import javafx.scene.control.TableCell;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.TextArea;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
public final class ExceptionAlert extends Alert {
    @FXML private VBox contentVBox;
    @FXML private Label label;
    @FXML private TableView<ErrorItem> proBErrorTable;
    @FXML private TableColumn<ErrorItem, ErrorItem> typeColumn;
    @FXML private TableColumn<ErrorItem, ErrorItem> messageColumn;
    @FXML private TableColumn<ErrorItem, ErrorItem> locationsColumn;
    @FXML private TextArea stackTraceTextArea;

    private final StageManager stageManager;
    private final ResourceBundle bundle;
    private final EditMenu editMenu;
    private final String text;
    private final Throwable exc;

    /**
     * Creates an alert describing the given exception. If a ProBError is
     * found anywhere in the cause chain, its original message and error
     * items are shown in addition to the stack trace.
     *
     * @param injector used to look up StageManager, ResourceBundle, EditMenu
     * @param text short description prefixed to the exception message
     * @param exc the exception to display; must not be null
     */
    public ExceptionAlert(final Injector injector, final String text, final Throwable exc) {
        super(Alert.AlertType.NONE); // Alert type is set in FXML
        Objects.requireNonNull(exc);
        this.stageManager = injector.getInstance(StageManager.class);
        this.bundle = injector.getInstance(ResourceBundle.class);
        this.editMenu = injector.getInstance(EditMenu.class);
        this.text = text;
        this.exc = exc;
        stageManager.loadFXML(this, "exception_alert.fxml");
    }

    @FXML
    private void initialize() {
        stageManager.register(this);
        // Walk the cause chain instead of only checking exc directly, so a
        // ProBError wrapped inside another exception is still detected.
        // This also removes the repeated (ProBError) casts below.
        ProBError proBError = null;
        for (Throwable e = this.exc; e != null; e = e.getCause()) {
            if (e instanceof ProBError) {
                proBError = (ProBError)e;
                break;
            }
        }
        final String message;
        if (proBError != null) {
            message = proBError.getOriginalMessage();
        } else {
            message = this.exc.getMessage();
        }
        this.label.setText(this.text + ":\n" + message);
        // Every column's cell value is the row's ErrorItem itself; the cell
        // factories below decide which aspect of the item to render.
        final Callback<TableColumn.CellDataFeatures<ErrorItem, ErrorItem>, ObservableValue<ErrorItem>> cellValueFactory = features -> Bindings.createObjectBinding(features::getValue);
        this.typeColumn.setCellValueFactory(cellValueFactory);
        this.messageColumn.setCellValueFactory(cellValueFactory);
        this.locationsColumn.setCellValueFactory(cellValueFactory);
        this.typeColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setText(null);
                } else {
                    // Localized name for the error type, falling back to the raw enum name.
                    final String typeName;
                    switch (item.getType()) {
                        case WARNING:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.warning");
                            break;
                        case ERROR:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.error");
                            break;
                        case INTERNAL_ERROR:
                            typeName = bundle.getString("exceptionAlert.proBErrorTable.type.internalError");
                            break;
                        default:
                            typeName = item.getType().name();
                    }
                    this.setText(typeName);
                }
            }
        });
        this.messageColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setText(null);
                } else {
                    this.setText(item.getMessage());
                }
            }
        });
        this.locationsColumn.setCellFactory(col -> new TableCell<ErrorItem, ErrorItem>() {
            @Override
            protected void updateItem(final ErrorItem item, final boolean empty) {
                super.updateItem(item, empty);
                if (empty || item == null) {
                    this.setGraphic(null);
                } else {
                    // One row per error location: an edit button plus the location text.
                    final VBox vbox = new VBox();
                    for (final ErrorItem.Location location : item.getLocations()) {
                        final Button openLocationButton = new Button(null, new FontAwesomeIconView(FontAwesomeIcon.PENCIL));
                        openLocationButton.setOnAction(event -> openLocationInEditor(location));
                        // Renamed from "label" to avoid shadowing the class field.
                        final Label locationLabel = new Label(location.toString());
                        locationLabel.setTextOverrun(OverrunStyle.LEADING_ELLIPSIS);
                        final HBox hbox = new HBox(openLocationButton, locationLabel);
                        HBox.setHgrow(openLocationButton, Priority.NEVER);
                        HBox.setHgrow(locationLabel, Priority.ALWAYS);
                        hbox.setAlignment(Pos.CENTER_LEFT);
                        vbox.getChildren().add(hbox);
                    }
                    this.setGraphic(vbox);
                }
            }
        });
        try (final CharArrayWriter caw = new CharArrayWriter(); final PrintWriter pw = new PrintWriter(caw)) {
            exc.printStackTrace(pw);
            this.stackTraceTextArea.setText(caw.toString());
        }
        if (proBError != null && proBError.getErrors() != null) {
            this.proBErrorTable.getItems().setAll(proBError.getErrors());
        } else {
            // No structured ProB errors available - hide the table entirely.
            this.contentVBox.getChildren().remove(this.proBErrorTable);
        }
    }

    private void openLocationInEditor(final ErrorItem.Location location) {
        // TODO Jump to error location in file
        editMenu.showEditorStage(Paths.get(location.getFilename()));
    }
}
|
Detect wrapped ProBErrors in ExceptionAlert
|
src/main/java/de/prob2/ui/internal/ExceptionAlert.java
|
Detect wrapped ProBErrors in ExceptionAlert
|
|
Java
|
agpl-3.0
|
a0e6fbbf9e4275e0f07631f9356a1b43d0c34cb6
| 0
|
elki-project/elki,elki-project/elki,elki-project/elki
|
package de.lmu.ifi.dbs.elki.algorithm.benchmark;
/*
This file is part of ELKI:
Environment for Developing KDD-Applications Supported by Index-Structures
Copyright (C) 2012
Ludwig-Maximilians-Universität München
Lehr- und Forschungseinheit für Datenbanksysteme
ELKI Development Team
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import de.lmu.ifi.dbs.elki.algorithm.AbstractDistanceBasedAlgorithm;
import de.lmu.ifi.dbs.elki.data.type.TypeInformation;
import de.lmu.ifi.dbs.elki.data.type.TypeUtil;
import de.lmu.ifi.dbs.elki.database.Database;
import de.lmu.ifi.dbs.elki.database.ids.DBIDIter;
import de.lmu.ifi.dbs.elki.database.ids.DBIDRange;
import de.lmu.ifi.dbs.elki.database.ids.DBIDUtil;
import de.lmu.ifi.dbs.elki.database.ids.DBIDs;
import de.lmu.ifi.dbs.elki.database.query.distance.DistanceQuery;
import de.lmu.ifi.dbs.elki.database.query.knn.KNNQuery;
import de.lmu.ifi.dbs.elki.database.relation.Relation;
import de.lmu.ifi.dbs.elki.datasource.DatabaseConnection;
import de.lmu.ifi.dbs.elki.datasource.bundle.MultipleObjectsBundle;
import de.lmu.ifi.dbs.elki.distance.distancefunction.DistanceFunction;
import de.lmu.ifi.dbs.elki.distance.distanceresultlist.KNNResult;
import de.lmu.ifi.dbs.elki.distance.distancevalue.Distance;
import de.lmu.ifi.dbs.elki.distance.distancevalue.NumberDistance;
import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.progress.FiniteProgress;
import de.lmu.ifi.dbs.elki.math.MeanVariance;
import de.lmu.ifi.dbs.elki.result.Result;
import de.lmu.ifi.dbs.elki.utilities.RandomFactory;
import de.lmu.ifi.dbs.elki.utilities.Util;
import de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.Parameterization;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.DoubleParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.IntParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.ObjectParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.RandomParameter;
/**
* Benchmarking algorithm that computes the k nearest neighbors for each query
* point. The query points can either come from a separate data source, or from
* the original database.
*
* @author Erich Schubert
*
* @param <O> Object type
*
* @apiviz.uses KNNQuery
*/
public class KNNBenchmarkAlgorithm<O, D extends Distance<D>> extends AbstractDistanceBasedAlgorithm<O, D, Result> {
/**
 * The logger for this class.
 */
private static final Logging LOG = Logging.getLogger(KNNBenchmarkAlgorithm.class);
/**
 * Number of neighbors to retrieve.
 */
protected int k = 10;
/**
 * The alternate query point source. Optional. When null, the query points
 * are drawn from the original database relation instead.
 */
protected DatabaseConnection queries = null;
/**
 * Sampling size. Values <= 0 disable sampling; values below 1.1 are used
 * as a relative sampling rate; larger values as an absolute sample size
 * (see the run method's sampling branches).
 */
protected double sampling = -1;
/**
 * Random generator factory
 */
protected RandomFactory random;
/**
* Constructor.
*
* @param distanceFunction Distance function to use
* @param k K parameter
* @param queries Query data set (may be null!)
* @param sampling Sampling rate
* @param random Random factory
*/
public KNNBenchmarkAlgorithm(DistanceFunction<? super O, D> distanceFunction, int k, DatabaseConnection queries, double sampling, RandomFactory random) {
  super(distanceFunction);
  // Number of nearest neighbors each benchmark query retrieves.
  this.k = k;
  // Optional separate query data source; null means query the original relation.
  this.queries = queries;
  // <= 0: no sampling; (0, 1.1): relative rate; larger: absolute sample size.
  this.sampling = sampling;
  this.random = random;
}
/**
* Run the algorithm.
*
* @param database Database
* @param relation Relation
* @return Null result
*/
public Result run(Database database, Relation<O> relation) {
// Get a distance and kNN query instance.
DistanceQuery<O, D> distQuery = database.getDistanceQuery(relation, getDistanceFunction());
KNNQuery<O, D> knnQuery = database.getKNNQuery(distQuery, 10);
// No query set - use original database.
if (queries == null) {
final DBIDs sample;
if (sampling <= 0) {
sample = relation.getDBIDs();
} else if (sampling < 1.1) {
int size = (int) Math.min(sampling * relation.size(), relation.size());
sample = DBIDUtil.randomSample(relation.getDBIDs(), size, random);
} else {
int size = (int) Math.min(sampling, relation.size());
sample = DBIDUtil.randomSample(relation.getDBIDs(), size, random);
}
FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
int hash = 0;
MeanVariance mv = new MeanVariance(), mvdist = new MeanVariance();
for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
KNNResult<D> knns = knnQuery.getKNNForDBID(iditer, k);
int ichecksum = 0;
for (DBIDIter it = knns.iter(); it.valid(); it.advance()) {
ichecksum += it.internalGetIndex();
}
hash = Util.mixHashCodes(hash, ichecksum);
mv.put(knns.size());
D kdist = knns.getKNNDistance();
if (kdist instanceof NumberDistance) {
mvdist.put(((NumberDistance<?, ?>) kdist).doubleValue());
}
if (prog != null) {
prog.incrementProcessed(LOG);
}
}
if (prog != null) {
prog.ensureCompleted(LOG);
}
if (LOG.isVerbose()) {
LOG.verbose("Result hashcode: " + hash);
LOG.verbose("Mean number of results: " + mv.toString());
if (mvdist.getCount() > 0) {
LOG.verbose("Mean k-distance: " + mvdist.toString());
}
}
} else {
// Separate query set.
TypeInformation res = getDistanceFunction().getInputTypeRestriction();
MultipleObjectsBundle bundle = queries.loadData();
int col = -1;
for (int i = 0; i < bundle.metaLength(); i++) {
if (res.isAssignableFromType(bundle.meta(i))) {
col = i;
break;
}
}
if (col < 0) {
throw new AbortException("No compatible data type in query input was found. Expected: " + res.toString());
}
// Random sampling is a bit of hack, sorry.
// But currently, we don't (yet) have an "integer random sample" function.
DBIDRange sids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength());
final DBIDs sample;
if (sampling <= 0) {
sample = sids;
} else if (sampling < 1.1) {
int size = (int) Math.min(sampling * relation.size(), relation.size());
sample = DBIDUtil.randomSample(sids, size, random);
} else {
int size = (int) Math.min(sampling, sids.size());
sample = DBIDUtil.randomSample(sids, size, random);
}
FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
int hash = 0;
MeanVariance mv = new MeanVariance(), mvdist = new MeanVariance();
for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
int off = sids.binarySearch(iditer);
assert (off >= 0);
@SuppressWarnings("unchecked")
O o = (O) bundle.data(off, col);
KNNResult<D> knns = knnQuery.getKNNForObject(o, k);
int ichecksum = 0;
for (DBIDIter it = knns.iter(); it.valid(); it.advance()) {
ichecksum += it.internalGetIndex();
}
hash = Util.mixHashCodes(hash, ichecksum);
mv.put(knns.size());
D kdist = knns.getKNNDistance();
if (kdist instanceof NumberDistance) {
mvdist.put(((NumberDistance<?, ?>) kdist).doubleValue());
}
if (prog != null) {
prog.incrementProcessed(LOG);
}
if (LOG.isVerbose()) {
LOG.verbose("Result hashcode: " + hash);
LOG.verbose("Mean number of results: " + mv.toString());
if (mvdist.getCount() > 0) {
LOG.verbose("Mean k-distance: " + mvdist.toString());
}
}
}
if (prog != null) {
prog.ensureCompleted(LOG);
}
}
return null;
}
@Override
public TypeInformation[] getInputTypeRestriction() {
return TypeUtil.array(getDistanceFunction().getInputTypeRestriction());
}
@Override
protected Logging getLogger() {
return LOG;
}
/**
* Parameterization class
*
* @apiviz.exclude
*
* @author Erich Schubert
*
* @param <O> Object type
* @param <D> Distance type
*/
public static class Parameterizer<O, D extends Distance<D>> extends AbstractDistanceBasedAlgorithm.Parameterizer<O, D> {
/**
* Parameter for the number of neighbors.
*/
public static final OptionID K_ID = new OptionID("knnbench.k", "Number of neighbors to retreive for kNN benchmarking.");
/**
* Parameter for the query dataset.
*/
public static final OptionID QUERY_ID = new OptionID("knnbench.query", "Data source for the queries. If not set, the queries are taken from the database.");
/**
* Parameter for the sampling size.
*/
public static final OptionID SAMPLING_ID = new OptionID("knnbench.sampling", "Sampling size parameter. If the value is less or equal 1, it is assumed to be the relative share. Larger values will be interpreted as integer sizes. By default, all data will be used.");
/**
* Parameter for the random generator
*/
public static final OptionID RANDOM_ID = new OptionID("knnbench.random", "Random generator for sampling.");
/**
* K parameter
*/
protected int k = 10;
/**
* The alternate query point source. Optional.
*/
protected DatabaseConnection queries = null;
/**
* Sampling size.
*/
protected double sampling = -1;
/**
* Random generator factory
*/
protected RandomFactory random;
@Override
protected void makeOptions(Parameterization config) {
super.makeOptions(config);
IntParameter kP = new IntParameter(K_ID);
if (config.grab(kP)) {
k = kP.intValue();
}
ObjectParameter<DatabaseConnection> queryP = new ObjectParameter<>(QUERY_ID, DatabaseConnection.class);
queryP.setOptional(true);
if (config.grab(queryP)) {
queries = queryP.instantiateClass(config);
}
DoubleParameter samplingP = new DoubleParameter(SAMPLING_ID);
samplingP.setOptional(true);
if (config.grab(samplingP)) {
sampling = samplingP.doubleValue();
}
RandomParameter randomP = new RandomParameter(RANDOM_ID, RandomFactory.DEFAULT);
if (config.grab(randomP)) {
random = randomP.getValue();
}
}
@Override
protected KNNBenchmarkAlgorithm<O, D> makeInstance() {
return new KNNBenchmarkAlgorithm<>(distanceFunction, k, queries, sampling, random);
}
}
}
|
src/de/lmu/ifi/dbs/elki/algorithm/benchmark/KNNBenchmarkAlgorithm.java
|
package de.lmu.ifi.dbs.elki.algorithm.benchmark;
/*
This file is part of ELKI:
Environment for Developing KDD-Applications Supported by Index-Structures
Copyright (C) 2012
Ludwig-Maximilians-Universität München
Lehr- und Forschungseinheit für Datenbanksysteme
ELKI Development Team
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import de.lmu.ifi.dbs.elki.algorithm.AbstractDistanceBasedAlgorithm;
import de.lmu.ifi.dbs.elki.data.type.TypeInformation;
import de.lmu.ifi.dbs.elki.data.type.TypeUtil;
import de.lmu.ifi.dbs.elki.database.Database;
import de.lmu.ifi.dbs.elki.database.ids.DBIDIter;
import de.lmu.ifi.dbs.elki.database.ids.DBIDRange;
import de.lmu.ifi.dbs.elki.database.ids.DBIDUtil;
import de.lmu.ifi.dbs.elki.database.ids.DBIDs;
import de.lmu.ifi.dbs.elki.database.query.distance.DistanceQuery;
import de.lmu.ifi.dbs.elki.database.query.knn.KNNQuery;
import de.lmu.ifi.dbs.elki.database.relation.Relation;
import de.lmu.ifi.dbs.elki.datasource.DatabaseConnection;
import de.lmu.ifi.dbs.elki.datasource.bundle.MultipleObjectsBundle;
import de.lmu.ifi.dbs.elki.distance.distancefunction.DistanceFunction;
import de.lmu.ifi.dbs.elki.distance.distanceresultlist.KNNResult;
import de.lmu.ifi.dbs.elki.distance.distancevalue.Distance;
import de.lmu.ifi.dbs.elki.logging.Logging;
import de.lmu.ifi.dbs.elki.logging.progress.FiniteProgress;
import de.lmu.ifi.dbs.elki.math.MeanVariance;
import de.lmu.ifi.dbs.elki.result.Result;
import de.lmu.ifi.dbs.elki.utilities.RandomFactory;
import de.lmu.ifi.dbs.elki.utilities.Util;
import de.lmu.ifi.dbs.elki.utilities.exceptions.AbortException;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.OptionID;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameterization.Parameterization;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.DoubleParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.IntParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.ObjectParameter;
import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.RandomParameter;
/**
* Benchmarking algorithm that computes the k nearest neighbors for each query
* point. The query points can either come from a separate data source, or from
* the original database.
*
* @author Erich Schubert
*
* @param <O> Object type
*
* @apiviz.uses KNNQuery
*/
public class KNNBenchmarkAlgorithm<O, D extends Distance<D>> extends AbstractDistanceBasedAlgorithm<O, D, Result> {
  /**
   * The logger for this class.
   */
  private static final Logging LOG = Logging.getLogger(KNNBenchmarkAlgorithm.class);

  /**
   * Number of neighbors to retrieve.
   */
  protected int k = 10;

  /**
   * The alternate query point source. Optional.
   */
  protected DatabaseConnection queries = null;

  /**
   * Sampling size: values <= 0 disable sampling, values below 1.1 are a
   * relative share, larger values an absolute sample size.
   */
  protected double sampling = -1;

  /**
   * Random generator factory used for sampling.
   */
  protected RandomFactory random;

  /**
   * Constructor.
   *
   * @param distanceFunction Distance function to use
   * @param k K parameter
   * @param queries Query data set (may be null!)
   * @param sampling Sampling rate
   * @param random Random factory
   */
  public KNNBenchmarkAlgorithm(DistanceFunction<? super O, D> distanceFunction, int k, DatabaseConnection queries, double sampling, RandomFactory random) {
    super(distanceFunction);
    this.k = k;
    this.queries = queries;
    this.sampling = sampling;
    this.random = random;
  }

  /**
   * Run the benchmark: issue one kNN query per (sampled) query point and log
   * a result checksum plus result-size statistics.
   *
   * @param database Database
   * @param relation Relation to query
   * @return Null result
   */
  public Result run(Database database, Relation<O> relation) {
    // Get a distance and kNN query instance.
    DistanceQuery<O, D> distQuery = database.getDistanceQuery(relation, getDistanceFunction());
    // Fixed: pass the configured k as the optimization hint instead of a
    // hard-coded 10, so index-backed queries are planned for the right depth.
    KNNQuery<O, D> knnQuery = database.getKNNQuery(distQuery, k);
    // No query set - use original database.
    if (queries == null) {
      final DBIDs sample;
      if (sampling <= 0) {
        sample = relation.getDBIDs();
      } else if (sampling < 1.1) {
        int size = (int) Math.min(sampling * relation.size(), relation.size());
        sample = DBIDUtil.randomSample(relation.getDBIDs(), size, random);
      } else {
        int size = (int) Math.min(sampling, relation.size());
        sample = DBIDUtil.randomSample(relation.getDBIDs(), size, random);
      }
      FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
      int hash = 0;
      MeanVariance mv = new MeanVariance();
      for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
        KNNResult<D> knns = knnQuery.getKNNForDBID(iditer, k);
        // Checksum over the neighbor ids, to detect nondeterministic results.
        int ichecksum = 0;
        for (DBIDIter it = knns.iter(); it.valid(); it.advance()) {
          ichecksum += it.internalGetIndex();
        }
        hash = Util.mixHashCodes(hash, ichecksum);
        mv.put(knns.size());
        if (prog != null) {
          prog.incrementProcessed(LOG);
        }
      }
      if (prog != null) {
        prog.ensureCompleted(LOG);
      }
      if (LOG.isVerbose()) {
        LOG.verbose("Result hashcode: " + hash);
        LOG.verbose("Mean number of results: " + mv.toString());
      }
    } else {
      // Separate query set.
      TypeInformation res = getDistanceFunction().getInputTypeRestriction();
      MultipleObjectsBundle bundle = queries.loadData();
      int col = -1;
      for (int i = 0; i < bundle.metaLength(); i++) {
        if (res.isAssignableFromType(bundle.meta(i))) {
          col = i;
          break;
        }
      }
      if (col < 0) {
        throw new AbortException("No compatible data type in query input was found. Expected: " + res.toString());
      }
      // Random sampling is a bit of hack, sorry.
      // But currently, we don't (yet) have an "integer random sample" function.
      DBIDRange sids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength());
      final DBIDs sample;
      if (sampling <= 0) {
        sample = sids;
      } else if (sampling < 1.1) {
        // Fixed: the relative share refers to the query set, not the relation.
        int size = (int) Math.min(sampling * sids.size(), sids.size());
        sample = DBIDUtil.randomSample(sids, size, random);
      } else {
        int size = (int) Math.min(sampling, sids.size());
        sample = DBIDUtil.randomSample(sids, size, random);
      }
      FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
      int hash = 0;
      MeanVariance mv = new MeanVariance();
      for (DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
        int off = sids.binarySearch(iditer);
        assert (off >= 0);
        @SuppressWarnings("unchecked")
        O o = (O) bundle.data(off, col);
        KNNResult<D> knns = knnQuery.getKNNForObject(o, k);
        int ichecksum = 0;
        for (DBIDIter it = knns.iter(); it.valid(); it.advance()) {
          ichecksum += it.internalGetIndex();
        }
        hash = Util.mixHashCodes(hash, ichecksum);
        mv.put(knns.size());
        if (prog != null) {
          prog.incrementProcessed(LOG);
        }
      }
      if (prog != null) {
        prog.ensureCompleted(LOG);
      }
      // Fixed: the summary is logged once after the loop, not per iteration.
      if (LOG.isVerbose()) {
        LOG.verbose("Result hashcode: " + hash);
        LOG.verbose("Mean number of results: " + mv.toString());
      }
    }
    return null;
  }

  @Override
  public TypeInformation[] getInputTypeRestriction() {
    return TypeUtil.array(getDistanceFunction().getInputTypeRestriction());
  }

  @Override
  protected Logging getLogger() {
    return LOG;
  }

  /**
   * Parameterization class
   *
   * @apiviz.exclude
   *
   * @author Erich Schubert
   *
   * @param <O> Object type
   * @param <D> Distance type
   */
  public static class Parameterizer<O, D extends Distance<D>> extends AbstractDistanceBasedAlgorithm.Parameterizer<O, D> {
    /**
     * Parameter for the number of neighbors.
     */
    public static final OptionID K_ID = new OptionID("knnbench.k", "Number of neighbors to retrieve for kNN benchmarking.");

    /**
     * Parameter for the query dataset.
     */
    public static final OptionID QUERY_ID = new OptionID("knnbench.query", "Data source for the queries. If not set, the queries are taken from the database.");

    /**
     * Parameter for the sampling size.
     */
    public static final OptionID SAMPLING_ID = new OptionID("knnbench.sampling", "Sampling size parameter. If the value is less or equal 1, it is assumed to be the relative share. Larger values will be interpreted as integer sizes. By default, all data will be used.");

    /**
     * Parameter for the random generator
     */
    public static final OptionID RANDOM_ID = new OptionID("knnbench.random", "Random generator for sampling.");

    /**
     * K parameter
     */
    protected int k = 10;

    /**
     * The alternate query point source. Optional.
     */
    protected DatabaseConnection queries = null;

    /**
     * Sampling size.
     */
    protected double sampling = -1;

    /**
     * Random generator factory
     */
    protected RandomFactory random;

    @Override
    protected void makeOptions(Parameterization config) {
      super.makeOptions(config);
      IntParameter kP = new IntParameter(K_ID);
      if (config.grab(kP)) {
        k = kP.intValue();
      }
      ObjectParameter<DatabaseConnection> queryP = new ObjectParameter<>(QUERY_ID, DatabaseConnection.class);
      queryP.setOptional(true);
      if (config.grab(queryP)) {
        queries = queryP.instantiateClass(config);
      }
      DoubleParameter samplingP = new DoubleParameter(SAMPLING_ID);
      samplingP.setOptional(true);
      if (config.grab(samplingP)) {
        sampling = samplingP.doubleValue();
      }
      RandomParameter randomP = new RandomParameter(RANDOM_ID, RandomFactory.DEFAULT);
      if (config.grab(randomP)) {
        random = randomP.getValue();
      }
    }

    @Override
    protected KNNBenchmarkAlgorithm<O, D> makeInstance() {
      return new KNNBenchmarkAlgorithm<>(distanceFunction, k, queries, sampling, random);
    }
  }
}
|
Also report mean-var of knn distance.
|
src/de/lmu/ifi/dbs/elki/algorithm/benchmark/KNNBenchmarkAlgorithm.java
|
Also report mean-var of knn distance.
|
|
Java
|
lgpl-2.1
|
2938bb46a1c1f9436f1a7da5f826725a801f0072
| 0
|
YagoGG/loklak_server,singhpratyush/loklak_server,loklak/loklak_server,karajrish/loklak_server,wavicles/loklak_server,PiotrKowalski/loklak_server,loklak/loklak_server,loklak/loklak_server,singhpratyush/loklak_server,shivenmian/loklak_server,djmgit/apps.loklak.org,YagoGG/loklak_server,singhpratyush/loklak_server,sudheesh001/loklak_server,daminisatya/loklak_server,kavithaenair/apps.loklak.org,PiotrKowalski/loklak_server,wavicles/loklak_server,shivenmian/loklak_server,karajrish/loklak_server,loklak/loklak_server,smokingwheels/loklak_server_frontend_hdd,daminisatya/loklak_server,arashahmadi/sensemi_ai,smokingwheels/loklak_server_frontend_hdd,loklak/loklak_server,daminisatya/loklak_server,smsunarto/loklak_server,YagoGG/loklak_server,gopalmeena/loklak_server,sudheesh001/loklak_server,loklak/loklak_server,singhpratyush/loklak_server,smokingwheels/loklak_server_frontend_hdd,djmgit/apps.loklak.org,PiotrKowalski/loklak_server,daminisatya/loklak_server,YagoGG/loklak_server,singhpratyush/loklak_server,sudheesh001/loklak_server,smsunarto/loklak_server,smsunarto/loklak_server,fazeem84/susi_server,YagoGG/loklak_server,smsunarto/loklak_server,DravitLochan/susi_server,sudheesh001/loklak_server,PiotrKowalski/loklak_server,singhpratyush/loklak_server,DravitLochan/susi_server,fazeem84/susi_server,karajrish/loklak_server,shivenmian/loklak_server,YagoGG/loklak_server,PiotrKowalski/loklak_server,djmgit/apps.loklak.org,daminisatya/loklak_server,djmgit/apps.loklak.org,gopalmeena/loklak_server,daminisatya/loklak_server,gopalmeena/loklak_server,smsunarto/loklak_server,arashahmadi/sensemi_ai,singhpratyush/loklak_server,smokingwheels/loklak_server_frontend_hdd,kavithaenair/apps.loklak.org,arashahmadi/sensemi_ai,shivenmian/loklak_server,PiotrKowalski/loklak_server,YagoGG/loklak_server,shivenmian/loklak_server,wavicles/loklak_server,kavithaenair/apps.loklak.org,smokingwheels/loklak_server_frontend_hdd,sudheesh001/loklak_server,smokingwheels/loklak_server_frontend_hdd,smokingwheels/loklak_server_fr
ontend_hdd,wavicles/loklak_server,gopalmeena/loklak_server,smsunarto/loklak_server,gopalmeena/loklak_server,DravitLochan/susi_server,karajrish/loklak_server,shivenmian/loklak_server,shivenmian/loklak_server,sudheesh001/loklak_server,smsunarto/loklak_server,kavithaenair/apps.loklak.org,fazeem84/susi_server,gopalmeena/loklak_server,PiotrKowalski/loklak_server,DravitLochan/susi_server,sudheesh001/loklak_server,daminisatya/loklak_server,fazeem84/susi_server,loklak/loklak_server,arashahmadi/sensemi_ai
|
/**
* Query
* Copyright 26.04.2015 by Michael Peter Christen, @0rb1t3r
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
 *  This library is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package org.loklak.data;
import java.io.IOException;
import java.net.MalformedURLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.loklak.harvester.SourceType;
import org.loklak.tools.DateParser;
import com.fasterxml.jackson.core.JsonGenerator;
/**
* A Query is a recording of a search result based on the query.
* THIS IS NOT RECORDED TO TRACK USER ACTIONS, THIS IS USED TO RE-SEARCH A QUERY INDEFINITELY!
* Each query will be stored in elasticsearch and retrieved by the caretaker process in
* order of the retrieval_next field. That date is calculated based on the number of search results
* in the last time; the retrieval_next is estimated based on the time interval of all tweets in
* the search results of the last query.
*
* Privacy is important:
* TO ALL COMMITTERS: please do not add any user-identification details to the data structures
* to protect the privacy of the users; TO CODE EVALUATORS: please look for yourself that this
* code does not contain any user-related information (like IP, user agent etc.).
*/
public class QueryEntry extends AbstractIndexEntry implements IndexEntry {
private final static long DAY_MILLIS = 1000L * 60L * 60L * 24L;
private final static int RETRIEVAL_CONSTANT = 20; // the number of messages that we get with each retrieval at maximum
public static double ttl_factor = 0.5d;
protected String query; // the query in the exact way as the user typed it in
protected int query_length; // the length in the query, number of characters
protected SourceType source_type; // the (external) retrieval system where that query was submitted
protected int timezoneOffset; // the timezone offset of the user
protected Date query_first; // the date when this query was submitted by the user the first time
protected Date query_last; // the date when this query was submitted by the user the last time
protected Date retrieval_last; // the last time when this query was submitted to the external system
protected Date retrieval_next; // the estimated next time when the query should be submitted to get all messages
protected Date expected_next; // the estimated next time when one single message will appear
protected int query_count; // the number of queries by the user of that query done so far
protected int retrieval_count; // the number of retrievals of that query done so far to the external system
protected long message_period; // the estimated period length between two messages
protected int messages_per_day; // a message frequency based on the last query
protected long score_retrieval; // score for the retrieval order
protected long score_suggest; // score for the suggest order
/**
 * This initializer can only be used for first-time creation of a query track.
 *
 * @param query the query string, exactly as the user typed it
 * @param timezoneOffset the timezone offset of the user
 * @param timeline the first search result for the query, used to seed the retrieval statistics
 * @param source_type the (external) retrieval system where the query was submitted
 * @param byUserQuery true if a user submitted the query, false for an automatic retrieval
 */
public QueryEntry(final String query, final int timezoneOffset, final Timeline timeline, final SourceType source_type, final boolean byUserQuery) {
this.query = query;
this.query_length = query.length();
this.timezoneOffset = timezoneOffset;
this.source_type = source_type;
this.retrieval_count = 0; // will be set to 1 with first update
this.message_period = 0; // means: unknown
this.messages_per_day = 0; // means: unknown
this.score_retrieval = 0;
this.score_suggest = 0;
// update() sets retrieval_last (and the derived scheduling fields) from the
// timeline; it must run before query_first can be initialized from it.
update(timeline, byUserQuery);
this.query_first = retrieval_last; // first-time creation: first == last retrieval
}
/**
 * Deserializing constructor: restore a query entry from its map representation.
 *
 * @param map key/value form of a query entry as stored in the index
 * @throws IllegalArgumentException if the map cannot be interpreted
 */
protected QueryEntry(Map<String, Object> map) throws IllegalArgumentException {
init(map);
}
/**
 * Populate all fields from the given map; a current timestamp is passed to
 * parseDate as the fallback for each date field, and a missing source_type
 * defaults to SourceType.USER.
 * NOTE(review): numeric fields are read via parseLong((Number) map.get(...));
 * if such a key is absent this presumably throws a NullPointerException
 * rather than IllegalArgumentException - confirm against callers.
 *
 * @param map key/value form of a query entry as stored in the index
 * @throws IllegalArgumentException if the map cannot be interpreted
 */
public void init(Map<String, Object> map) throws IllegalArgumentException {
this.query = (String) map.get("query");
this.query_length = (int) parseLong((Number) map.get("query_length"));
// entries stored before the source_type field existed default to USER
String source_type_string = (String) map.get("source_type"); if (source_type_string == null) source_type_string = SourceType.USER.name();
this.source_type = SourceType.valueOf(source_type_string);
this.timezoneOffset = (int) parseLong((Number) map.get("timezoneOffset"));
Date now = new Date();
this.query_first = parseDate(map.get("query_first"), now);
this.query_last = parseDate(map.get("query_last"), now);
this.retrieval_last = parseDate(map.get("retrieval_last"), now);
this.retrieval_next = parseDate(map.get("retrieval_next"), now);
this.expected_next = parseDate(map.get("expected_next"), now);
this.query_count = (int) parseLong((Number) map.get("query_count"));
this.retrieval_count = (int) parseLong((Number) map.get("retrieval_count"));
this.message_period = parseLong((Number) map.get("message_period"));
this.messages_per_day = (int) parseLong((Number) map.get("messages_per_day"));
this.score_retrieval = (int) parseLong((Number) map.get("score_retrieval"));
this.score_suggest = (int) parseLong((Number) map.get("score_suggest"));
}
/**
 * Update the query entry after a retrieval: re-estimate the message period
 * and schedule both the next expected message and the next retrieval.
 *
 * @param timeline the latest timeline retrieved from the target system
 * @param byUserQuery is true, if the query was submitted by the user; false if the query was submitted by an automatic system
 */
public void update(final Timeline timeline, final boolean byUserQuery) {
this.retrieval_last = new Date();
this.retrieval_count++;
if (byUserQuery) {
this.query_count++;
this.query_last = this.retrieval_last;
}
long new_message_period = timeline.period(); // can be Long.MAX_VALUE if less than 2 messages are in timeline!
int new_messages_per_day = (int) (DAY_MILLIS / new_message_period); // this is an interpolation based on the last tweet list, can be 0!
if (new_message_period == Long.MAX_VALUE || new_messages_per_day == 0) {
// no usable fresh estimate: decay towards (at most) one message per day
this.message_period = this.message_period == 0 ? DAY_MILLIS : Math.min(DAY_MILLIS, this.message_period * 2);
} else {
// smooth by averaging the fresh estimate with the historic one
this.message_period = this.message_period == 0 ? new_message_period : (this.message_period + new_message_period) / 2;
}
this.messages_per_day = (int) (DAY_MILLIS / this.message_period);
this.expected_next = new Date(this.retrieval_last.getTime() + ((long) (ttl_factor * this.message_period)));
long pivot_period = DAO.getConfig("retrieval.pivotfrequency", 10000);
long strategic_period = // if the period is far below the minimum, we apply a penalty
(this.message_period < pivot_period ?
pivot_period + 1000 * (long) Math.pow((pivot_period - this.message_period) / 1000, 3) :
this.message_period);
// never wait longer than one day until the next automatic retrieval
long waitingtime = Math.min(DAY_MILLIS, (long) (ttl_factor * RETRIEVAL_CONSTANT * strategic_period));
this.retrieval_next = new Date(this.retrieval_last.getTime() + waitingtime);
}
// to check the retrieval order created by the update method, call
// http://localhost:9000/api/suggest.json?orderby=retrieval_next&order=asc
/**
* A 'blind' update can be done if the user submits a query but there are rules which prevent that the target system is queried
* as well. Then the query result is calculated using the already stored messages. To reflect this, only the query-related
* attributes are changed.
*/
public void update() {
// Blind update: only the user-query statistics change; the retrieval
// scheduling fields stay untouched because the target system was not asked.
this.query_last = new Date();
this.query_count++;
}
/** @return the query string, exactly as the user typed it */
public String getQuery() {
return this.query;
}
/** @return the number of characters in the query string */
public int getQueryLength() {
return this.query_length;
}
/** @return the (external) retrieval system where the query was submitted */
public SourceType getSourceType() {
return this.source_type;
}
/** @return the date this query was first submitted by a user */
public Date getQueryFirst() {
return this.query_first;
}
/** @return the date this query was last submitted by a user */
public Date getQueryLast() {
return this.query_last;
}
/** @return the last time this query was submitted to the external system */
public Date getRetrievalLast() {
return this.retrieval_last;
}
/** @return the estimated next time the query should be submitted again */
public Date getRetrievalNext() {
return this.retrieval_next;
}
/** @return the estimated next time a single new message will appear */
public Date getExpectedNext() {
return this.expected_next;
}
/** @return the timezone offset of the user who submitted the query */
public int getTimezoneOffset() {
return this.timezoneOffset;
}
/** @return how many times a user has submitted this query so far */
public int getQueryCount() {
return this.query_count;
}
/** @return how many times this query was sent to the external system */
public int getRetrievalCount() {
return this.retrieval_count;
}
/** @return the interpolated message frequency, in messages per day */
public int getMessagesPerDay() {
return this.messages_per_day;
}
@Override
public void toJSON(JsonGenerator json) {
// Serialize all fields; date fields are only written when present.
// Per the class contract, no user-identifying data may be added here.
try {
json.writeStartObject();
json.writeObjectField("query", this.query);
json.writeObjectField("query_length", this.query_length);
json.writeObjectField("source_type", this.source_type.name());
json.writeObjectField("timezoneOffset", this.timezoneOffset);
if (this.query_first != null) writeDate(json, "query_first", this.query_first.getTime());
if (this.query_last != null) writeDate(json, "query_last", this.query_last.getTime());
if (this.retrieval_last != null) writeDate(json, "retrieval_last", this.retrieval_last.getTime());
if (this.retrieval_next != null) writeDate(json, "retrieval_next", this.retrieval_next.getTime());
if (this.expected_next != null) writeDate(json, "expected_next", this.expected_next.getTime());
json.writeObjectField("query_count", this.query_count);
json.writeObjectField("retrieval_count", this.retrieval_count);
json.writeObjectField("message_period", this.message_period);
json.writeObjectField("messages_per_day", this.messages_per_day);
json.writeObjectField("score_retrieval", this.score_retrieval);
json.writeObjectField("score_suggest", this.score_suggest);
json.writeEndObject();
} catch (IOException e) {
// NOTE(review): the IOException is silently swallowed, which may leave a
// partially written JSON object behind; consider logging or rethrowing.
}
}
private final static Pattern tokenizerPattern = Pattern.compile("([^\"]\\S*|\".+?\")\\s*"); // tokenizes Strings into terms respecting quoted parts
// Constraint tokens ("/image", "-/place", ...) restrict search results to
// messages that have (or lack) a certain attribute.
private static enum Constraint {
image("images"),
audio("audio"),
video("videos"),
place("place_name"),
location("location_point"),
link("links"),
mention("mentions"),
hashtag("hashtags");
protected String field_name; // the index field this constraint refers to
protected Pattern pattern; // matches the constraint token inside a query string
private Constraint(String field_name) {
this.field_name = field_name;
// optional leading whitespace and negation dash, then "/<name>" plus any
// non-space suffix (such as "=..." parameters)
this.pattern = Pattern.compile("\\s?\\-?/" + this.name() + "\\S*");
}
}
/**
 * Strip every constraint token (such as "/image" or "-/place") from the
 * given query string.
 *
 * @param q the raw query string
 * @return the query with all constraint tokens removed
 */
public static String removeConstraints(String q) {
String cleaned = q;
for (final Constraint constraint : Constraint.values()) {
cleaned = constraint.pattern.matcher(cleaned).replaceAll("");
}
return cleaned;
}
/**
 * Filter a timeline by the constraint tokens of a query: "/x" requires the
 * attribute x to be present, "-/x" requires it to be absent. The location
 * and link constraints additionally accept "=..." parameters (a bounding box
 * and a regular expression, respectively).
 *
 * @param tl0 the unfiltered timeline
 * @param query the full query string, including constraint tokens
 * @return a new timeline containing only the messages that satisfy all constraints
 */
public static Timeline applyConstraint(Timeline tl0, String query) {
// tokenize the query
List<String> qe = new ArrayList<String>();
Matcher m = tokenizerPattern.matcher(query);
while (m.find()) qe.add(m.group(1));
// split tokens into positive ("/x") and negative ("-/x") constraints
HashSet<String> constraints_positive = new HashSet<>();
HashSet<String> constraints_negative = new HashSet<>();
for (String t: qe) {
if (t.startsWith("/")) constraints_positive.add(t.substring(1));
if (t.startsWith("-/")) constraints_negative.add(t.substring(2));
}
Timeline tl1 = new Timeline();
// simple presence/absence checks: drop the message on any violation
messageloop: for (MessageEntry message: tl0) {
if (constraints_positive.contains("image") && message.getImages().size() == 0) continue;
if (constraints_negative.contains("image") && message.getImages().size() != 0) continue;
if (constraints_positive.contains("place") && message.getPlaceName().length() == 0) continue;
if (constraints_negative.contains("place") && message.getPlaceName().length() != 0) continue;
if (constraints_positive.contains("location") && message.getLocationPoint() == null) continue;
if (constraints_negative.contains("location") && message.getLocationPoint() != null) continue;
if (constraints_positive.contains("link") && message.getLinks().length == 0) continue;
if (constraints_negative.contains("link") && message.getLinks().length != 0) continue;
if (constraints_positive.contains("mention") && message.getMentions().length == 0) continue;
if (constraints_negative.contains("mention") && message.getMentions().length != 0) continue;
if (constraints_positive.contains("hashtag") && message.getHashtags().length == 0) continue;
if (constraints_negative.contains("hashtag") && message.getHashtags().length != 0) continue;
// special treatment of location and link constraint
constraintCheck: for (String cs: constraints_positive) {
if (cs.startsWith(Constraint.location.name() + "=")) {
if (message.getLocationPoint() == null) continue messageloop;
// bounding box parameters: lon_west,lat_south,lon_east,lat_north
String params = cs.substring(Constraint.location.name().length() + 1);
String[] coord = params.split(",");
if (coord.length == 4) {
double lon = message.getLocationPoint()[0];
double lon_west = Double.parseDouble(coord[0]);
double lon_east = Double.parseDouble(coord[2]);
if (lon < lon_west || lon > lon_east) continue messageloop;
double lat = message.getLocationPoint()[1];
double lat_south = Double.parseDouble(coord[1]);
double lat_north = Double.parseDouble(coord[3]);
if (lat < lat_south || lat > lat_north) continue messageloop;
}
}
if (cs.startsWith(Constraint.link.name() + "=")) {
if (message.getLinks().length == 0) continue messageloop;
// at least one link must match the given regular expression
Pattern regex = Pattern.compile(cs.substring(Constraint.link.name().length() + 1));
for (String link: message.getLinks()) {
if (regex.matcher(link).matches()) continue constraintCheck;
}
// no match
continue messageloop;
}
}
tl1.addTweet(message);
}
return tl1;
}
public static class ElasticsearchQuery {

    // the elasticsearch query tree built from the user's query string
    QueryBuilder queryBuilder;
    // time interval selected with the since:/until: modifiers; defaults span all time
    Date since, until;

    /**
     * Parses a twitter-like search query string into an elasticsearch query.
     * @param q the query string exactly as the user typed it
     * @param timezoneOffset the user's timezone offset, used by DateParser to interpret since:/until: dates
     */
    public ElasticsearchQuery(String q, int timezoneOffset) {
        // default values for since and until
        this.since = new Date(0);
        this.until = new Date(Long.MAX_VALUE);
        // parse the query
        this.queryBuilder = parse(q, timezoneOffset);
    }

    /**
     * Tokenizes the query and translates plain terms, modifiers (from:, to:, near:,
     * since:, until:, id:) and /constraints into a bool query wrapped with filters.
     * NOTE(review): AND and OR junctors cannot be mixed; any " OR " makes ALL plain terms disjunctive.
     */
    private QueryBuilder parse(String q, int timezoneOffset) {
        // detect usage of OR junctor usage. Right now we cannot have mixed AND and OR usage. That's a hack right now
        q = q.replaceAll(" AND ", " "); // AND is default
        boolean ORjunctor = q.indexOf(" OR ") >= 0;
        q = q.replaceAll(" OR ", " "); // if we know that all terms are OR, we remove that and apply it later
        // tokenize the query
        List<String> qe = new ArrayList<String>();
        Matcher m = tokenizerPattern.matcher(q);
        while (m.find()) qe.add(m.group(1));
        // twitter search syntax:
        //   term1 term2 term3 - all three terms shall appear
        //   "term1 term2 term3" - exact match of all terms
        //   term1 OR term2 OR term3 - any of the three terms shall appear
        //   from:user - tweets posted from that user
        //   to:user - tweets posted to that user
        //   @user - tweets which mention that user
        //   near:"location" within:xmi - tweets that are near that location
        //   #hashtag - tweets containing the given hashtag
        //   since:2015-04-01 until:2015-04-03 - tweets within given time range
        // additional constraints:
        //   /image /audio /video /place - restrict to tweets which have attached images, audio, video or place
        ArrayList<String> text_positive_match = new ArrayList<>();
        ArrayList<String> text_negative_match = new ArrayList<>();
        ArrayList<String> text_positive_filter = new ArrayList<>();
        ArrayList<String> text_negative_filter = new ArrayList<>();
        ArrayList<String> users_positive = new ArrayList<>();
        ArrayList<String> users_negative = new ArrayList<>();
        ArrayList<String> hashtags_positive = new ArrayList<>();
        ArrayList<String> hashtags_negative = new ArrayList<>();
        HashMap<String, String> modifier = new HashMap<>();
        HashSet<String> constraints_positive = new HashSet<>();
        HashSet<String> constraints_negative = new HashSet<>();
        // classify each token: user mention, hashtag, constraint, key:value modifier, or plain text term
        for (String t: qe) {
            if (t.length() == 0) continue;
            if (t.startsWith("@")) {
                users_positive.add(t.substring(1));
                continue;
            } else if (t.startsWith("-@")) {
                users_negative.add(t.substring(2));
                continue;
            } else if (t.startsWith("#")) {
                hashtags_positive.add(t.substring(1));
                continue;
            } else if (t.startsWith("-#")) {
                hashtags_negative.add(t.substring(2));
                continue;
            } else if (t.startsWith("/")) {
                constraints_positive.add(t.substring(1));
                continue;
            } else if (t.startsWith("-/")) {
                constraints_negative.add(t.substring(2));
                continue;
            } else if (t.indexOf(':') > 0) {
                int p = t.indexOf(':');
                modifier.put(t.substring(0, p).toLowerCase(), t.substring(p + 1));
                continue;
            } else {
                // patch characters that will confuse elasticsearch or have a different meaning
                boolean negative = t.startsWith("-");
                if (negative) t = t.substring(1);
                if (t.length() == 0) continue;
                if ((t.charAt(0) == '"' && t.charAt(t.length() - 1) == '"') || (t.charAt(0) == '\'' && t.charAt(t.length() - 1) == '\'')) {
                    // quoted phrases become exact-match filters
                    t = t.substring(1, t.length() - 1);
                    if (negative) text_negative_filter.add(t); else text_positive_filter.add(t);
                } else if (t.indexOf('-') > 0) {
                    // this must be handled like a quoted string without the minus
                    t = t.replaceAll("-", " ");
                    if (negative) text_negative_filter.add(t); else text_positive_filter.add(t);
                } else {
                    if (negative) text_negative_match.add(t); else text_positive_match.add(t);
                }
                continue;
            }
        }
        // to:user is treated as a mention of that user
        if (modifier.containsKey("to")) users_positive.add(modifier.get("to"));
        // compose query for text
        BoolQueryBuilder bquery = QueryBuilders.boolQuery();
        for (String text: text_positive_match) {
            if (ORjunctor)
                bquery.should(QueryBuilders.matchQuery("text", text));
            else
                bquery.must(QueryBuilders.matchQuery("text", text));
        }
        for (String text: text_negative_match) {
            // negation of terms in disjunctions would cause to retrieve almost all documents
            // this cannot be the requirement of the user. It may be valid in conjunctions, but not in disjunctions
            bquery.mustNot(QueryBuilders.matchQuery("text", text));
        }
        // apply modifiers
        if (modifier.containsKey("id")) bquery.must(QueryBuilders.termQuery("id_str", modifier.get("id")));
        if (modifier.containsKey("-id")) bquery.mustNot(QueryBuilders.termQuery("id_str", modifier.get("-id")));
        if (modifier.containsKey("from")) {
            // from: accepts a single screen name or a comma-separated list, i.e. from:usera,userb (disjunction)
            String screen_name = modifier.get("from");
            if (screen_name.indexOf(',') < 0) {
                bquery.must(QueryBuilders.termQuery("screen_name", screen_name));
            } else {
                String[] screen_names = screen_name.split(",");
                BoolQueryBuilder disjunction = QueryBuilders.boolQuery();
                for (String name: screen_names) disjunction.should(QueryBuilders.termQuery("screen_name", name));
                bquery.must(disjunction);
            }
        }
        if (modifier.containsKey("-from")) bquery.mustNot(QueryBuilders.termQuery("screen_name", modifier.get("-from")));
        if (modifier.containsKey("near")) {
            // near: matches either the recorded place name or the text itself
            BoolQueryBuilder nearquery = QueryBuilders.boolQuery()
                .should(QueryBuilders.matchQuery("place_name", modifier.get("near")))
                .should(QueryBuilders.matchQuery("text", modifier.get("near")));
            bquery.must(nearquery);
        }
        if (modifier.containsKey("since")) try {
            Calendar since = DateParser.parse(modifier.get("since"), timezoneOffset);
            this.since = since.getTime();
            RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery("created_at").from(this.since);
            if (modifier.containsKey("until")) {
                Calendar until = DateParser.parse(modifier.get("until"), timezoneOffset);
                if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                    // until must be the day which is included in results.
                    // To get the result within the same day, we must add one day.
                    until.add(Calendar.DATE, 1);
                }
                this.until = until.getTime();
                rangeQuery.to(this.until);
            } else {
                this.until = new Date(Long.MAX_VALUE);
            }
            bquery.must(rangeQuery);
        } catch (ParseException e) {} else if (modifier.containsKey("until")) try {
            Calendar until = DateParser.parse(modifier.get("until"), timezoneOffset);
            if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                // until must be the day which is included in results.
                // To get the result within the same day, we must add one day.
                until.add(Calendar.DATE, 1);
            }
            this.until = until.getTime();
            RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery("created_at").to(this.until);
            bquery.must(rangeQuery);
        } catch (ParseException e) {} // NOTE(review): an unparsable date silently drops the since/until modifier
        // apply constraints as filters
        QueryBuilder cquery = bquery;
        for (String text: text_positive_filter) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.termsFilter("text", text));
        for (String text: text_negative_filter) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.termsFilter("text", text)));
        for (String user: users_positive) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.inFilter("mentions", user));
        for (String user: users_negative) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.inFilter("mentions", user)));
        // hashtags are indexed lowercase
        for (String hashtag: hashtags_positive) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.inFilter("hashtags", hashtag.toLowerCase()));
        for (String hashtag: hashtags_negative) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.inFilter("hashtags", hashtag.toLowerCase())));
        for (Constraint c: Constraint.values()) {
            if (constraints_positive.contains(c.name())) {
                cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(c.field_name));
            }
            if (constraints_negative.contains(c.name())) {
                cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.existsFilter(c.field_name)));
            }
        }
        // special treatment of location constraints of the form /location=lon-west,lat-south,lon-east,lat-north i.e. /location=8.58,50.178,8.59,50.181
        for (String cs: constraints_positive) {
            if (cs.startsWith(Constraint.location.name() + "=")) {
                String params = cs.substring(Constraint.location.name().length() + 1);
                String[] coord = params.split(",");
                if (coord.length == 4) {
                    double lon_west = Double.parseDouble(coord[0]);
                    double lat_south = Double.parseDouble(coord[1]);
                    double lon_east = Double.parseDouble(coord[2]);
                    double lat_north = Double.parseDouble(coord[3]);
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(Constraint.location.field_name));
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.geoBoundingBoxFilter("location_point")
                        .topLeft(lat_north, lon_west)
                        .bottomRight(lat_south, lon_east));
                }
            }
            if (cs.startsWith(Constraint.link.name() + "=")) {
                // /link=<regexp> keeps only messages with at least one link matching the regexp
                String regexp = cs.substring(Constraint.link.name().length() + 1);
                cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(Constraint.link.field_name));
                cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.regexpFilter(Constraint.link.field_name, regexp));
            }
        }
        return cquery;
    }
}
}
|
src/org/loklak/data/QueryEntry.java
|
/**
* Query
* Copyright 26.04.2015 by Michael Peter Christen, @0rb1t3r
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package org.loklak.data;
import java.io.IOException;
import java.net.MalformedURLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.FilterBuilders;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.RangeQueryBuilder;
import org.loklak.harvester.SourceType;
import org.loklak.tools.DateParser;
import com.fasterxml.jackson.core.JsonGenerator;
/**
* A Query is a recording of a search result based on the query.
* THIS IS NOT RECORDED TO TRACK USER ACTIONS, THIS IS USED TO RE-SEARCH A QUERY INDEFINITELY!
* Each query will be stored in elasticsearch and retrieved by the caretaker process in
* order of the retrieval_next field. That date is calculated based on the number of search results
* in the last time; the retrieval_next is estimated based on the time interval of all tweets in
* the search results of the last query.
*
* Privacy is important:
* TO ALL COMMITTERS: please do not add any user-identification details to the data structures
* to protect the privacy of the users; TO CODE EVALUATORS: please look for yourself that this
* code does not contain any user-related information (like IP, user agent etc.).
*/
public class QueryEntry extends AbstractIndexEntry implements IndexEntry {

    private final static long DAY_MILLIS = 1000L * 60L * 60L * 24L;
    private final static int RETRIEVAL_CONSTANT = 20; // the number of messages that we get with each retrieval at maximum
    public static double ttl_factor = 0.5d;

    protected String query;           // the query in the exact way as the user typed it in
    protected int query_length;       // the length in the query, number of characters
    protected SourceType source_type; // the (external) retrieval system where that query was submitted
    protected int timezoneOffset;     // the timezone offset of the user
    protected Date query_first;       // the date when this query was submitted by the user the first time
    protected Date query_last;        // the date when this query was submitted by the user the last time
    protected Date retrieval_last;    // the last time when this query was submitted to the external system
    protected Date retrieval_next;    // the estimated next time when the query should be submitted to get all messages
    protected Date expected_next;     // the estimated next time when one single message will appear
    protected int query_count;        // the number of queries by the user of that query done so far
    protected int retrieval_count;    // the number of retrievals of that query done so far to the external system
    protected long message_period;    // the estimated period length between two messages
    protected int messages_per_day;   // a message frequency based on the last query
    protected long score_retrieval;   // score for the retrieval order
    protected long score_suggest;     // score for the suggest order

    /**
     * This initializer can only be used for first-time creation of a query track.
     * @param query the query string exactly as the user typed it
     * @param timezoneOffset the timezone offset of the user
     * @param timeline the first search result for this query
     * @param source_type the external retrieval system the query was submitted to
     * @param byUserQuery true if a human submitted the query, false for automatic retrieval
     */
    public QueryEntry(final String query, final int timezoneOffset, final Timeline timeline, final SourceType source_type, final boolean byUserQuery) {
        this.query = query;
        this.query_length = query.length();
        this.timezoneOffset = timezoneOffset;
        this.source_type = source_type;
        this.retrieval_count = 0; // will be set to 1 with first update
        this.message_period = 0;  // means: unknown
        this.messages_per_day = 0; // means: unknown
        this.score_retrieval = 0;
        this.score_suggest = 0;
        update(timeline, byUserQuery);
        this.query_first = retrieval_last;
    }

    protected QueryEntry(Map<String, Object> map) throws IllegalArgumentException {
        init(map);
    }

    /**
     * Re-initializes this entry from a deserialized index document.
     * Missing dates default to "now"; a missing source_type defaults to USER.
     */
    public void init(Map<String, Object> map) throws IllegalArgumentException {
        this.query = (String) map.get("query");
        this.query_length = (int) parseLong((Number) map.get("query_length"));
        String source_type_string = (String) map.get("source_type"); if (source_type_string == null) source_type_string = SourceType.USER.name();
        this.source_type = SourceType.valueOf(source_type_string);
        this.timezoneOffset = (int) parseLong((Number) map.get("timezoneOffset"));
        Date now = new Date();
        this.query_first = parseDate(map.get("query_first"), now);
        this.query_last = parseDate(map.get("query_last"), now);
        this.retrieval_last = parseDate(map.get("retrieval_last"), now);
        this.retrieval_next = parseDate(map.get("retrieval_next"), now);
        this.expected_next = parseDate(map.get("expected_next"), now);
        this.query_count = (int) parseLong((Number) map.get("query_count"));
        this.retrieval_count = (int) parseLong((Number) map.get("retrieval_count"));
        this.message_period = parseLong((Number) map.get("message_period"));
        this.messages_per_day = (int) parseLong((Number) map.get("messages_per_day"));
        this.score_retrieval = (int) parseLong((Number) map.get("score_retrieval"));
        this.score_suggest = (int) parseLong((Number) map.get("score_suggest"));
    }

    /**
     * update the query entry
     * @param timeline the latest timeline retrieved from the target system
     * @param byUserQuery is true, if the query was submitted by the user; false if the query was submitted by an automatic system
     */
    public void update(final Timeline timeline, final boolean byUserQuery) {
        this.retrieval_last = new Date();
        this.retrieval_count++;
        if (byUserQuery) {
            this.query_count++;
            this.query_last = this.retrieval_last;
        }
        long new_message_period = timeline.period(); // can be Long.MAX_VALUE if less than 2 messages are in timeline!
        int new_messages_per_day = (int) (DAY_MILLIS / new_message_period); // this is an interpolation based on the last tweet list, can be 0!
        if (new_message_period == Long.MAX_VALUE || new_messages_per_day == 0) {
            // no usable sample: decay the estimate towards one message per day
            this.message_period = this.message_period == 0 ? DAY_MILLIS : Math.min(DAY_MILLIS, this.message_period * 2);
        } else {
            // blend the new sample with the previous estimate (simple running average)
            this.message_period = this.message_period == 0 ? new_message_period : (this.message_period + new_message_period) / 2;
        }
        this.messages_per_day = (int) (DAY_MILLIS / this.message_period);
        this.expected_next = new Date(this.retrieval_last.getTime() + ((long) (ttl_factor * this.message_period)));
        long pivot_period = DAO.getConfig("retrieval.pivotfrequency", 10000);
        long strategic_period =  // if the period is far below the minimum, we apply a penalty
            (this.message_period < pivot_period ?
             pivot_period + 1000 * (long) Math.pow((pivot_period - this.message_period) / 1000, 3) :
             this.message_period);
        long waitingtime = Math.min(DAY_MILLIS, (long) (ttl_factor * RETRIEVAL_CONSTANT * strategic_period));
        this.retrieval_next = new Date(this.retrieval_last.getTime() + waitingtime);
    }
    // to check the retrieval order created by the update method, call
    // http://localhost:9000/api/suggest.json?orderby=retrieval_next&order=asc

    /**
     * A 'blind' update can be done if the user submits a query but there are rules which prevent that the target system is queried
     * as well. Then the query result is calculated using the already stored messages. To reflect this, only the query-related
     * attributes are changed.
     */
    public void update() {
        this.query_count++;
        this.query_last = new Date();
    }

    /** @return the query string exactly as typed by the user */
    public String getQuery() {
        return this.query;
    }

    /** @return the number of characters in the query */
    public int getQueryLength() {
        return this.query_length;
    }

    /** @return the external retrieval system this query was submitted to */
    public SourceType getSourceType() {
        return this.source_type;
    }

    /** @return the first time a user submitted this query */
    public Date getQueryFirst() {
        return this.query_first;
    }

    /** @return the last time a user submitted this query */
    public Date getQueryLast() {
        return this.query_last;
    }

    /** @return the last time the query was sent to the external system */
    public Date getRetrievalLast() {
        return this.retrieval_last;
    }

    /** @return the scheduled next retrieval time */
    public Date getRetrievalNext() {
        return this.retrieval_next;
    }

    /** @return the estimated time when the next single message will appear */
    public Date getExpectedNext() {
        return this.expected_next;
    }

    /** @return the timezone offset of the user who submitted the query */
    public int getTimezoneOffset() {
        return this.timezoneOffset;
    }

    /** @return the number of user-submitted executions of this query */
    public int getQueryCount() {
        return this.query_count;
    }

    /** @return the number of retrievals against the external system */
    public int getRetrievalCount() {
        return this.retrieval_count;
    }

    /** @return the estimated message frequency per day */
    public int getMessagesPerDay() {
        return this.messages_per_day;
    }

    /**
     * Serializes this entry as one JSON object. IOExceptions from the generator
     * are intentionally swallowed to keep the IndexEntry contract exception-free.
     */
    @Override
    public void toJSON(JsonGenerator json) {
        try {
            json.writeStartObject();
            json.writeObjectField("query", this.query);
            json.writeObjectField("query_length", this.query_length);
            json.writeObjectField("source_type", this.source_type.name());
            json.writeObjectField("timezoneOffset", this.timezoneOffset);
            if (this.query_first != null) writeDate(json, "query_first", this.query_first.getTime());
            if (this.query_last != null) writeDate(json, "query_last", this.query_last.getTime());
            if (this.retrieval_last != null) writeDate(json, "retrieval_last", this.retrieval_last.getTime());
            if (this.retrieval_next != null) writeDate(json, "retrieval_next", this.retrieval_next.getTime());
            if (this.expected_next != null) writeDate(json, "expected_next", this.expected_next.getTime());
            json.writeObjectField("query_count", this.query_count);
            json.writeObjectField("retrieval_count", this.retrieval_count);
            json.writeObjectField("message_period", this.message_period);
            json.writeObjectField("messages_per_day", this.messages_per_day);
            json.writeObjectField("score_retrieval", this.score_retrieval);
            json.writeObjectField("score_suggest", this.score_suggest);
            json.writeEndObject();
        } catch (IOException e) {
            // intentionally ignored; see method comment
        }
    }

    private final static Pattern tokenizerPattern = Pattern.compile("([^\"]\\S*|\".+?\")\\s*"); // tokenizes Strings into terms respecting quoted parts

    private static enum Constraint {
        image("images"),
        audio("audio"),
        video("videos"),
        place("place_name"),
        location("location_point"),
        link("links"),
        mention("mentions"),
        hashtag("hashtags");
        protected String field_name; // the index field that must exist when the constraint is set
        protected Pattern pattern;   // matches "/<name>..." or "-/<name>..." tokens for removal from queries
        private Constraint(String field_name) {
            this.field_name = field_name;
            this.pattern = Pattern.compile("\\s?\\-?/" + this.name() + "\\S*");
        }
    }

    /**
     * Strips all /constraint and -/constraint tokens from a query string.
     * @param q the raw query
     * @return the query without constraint tokens
     */
    public static String removeConstraints(String q) {
        for (Constraint c: Constraint.values()) {
            q = c.pattern.matcher(q).replaceAll("");
        }
        return q;
    }

    /**
     * Filters an already-retrieved timeline with the /constraints contained in the query.
     * @param tl0 the unfiltered timeline
     * @param query the query string, possibly containing /constraint tokens
     * @return a new timeline containing only messages that satisfy all constraints
     */
    public static Timeline applyConstraint(Timeline tl0, String query) {
        // tokenize the query
        List<String> qe = new ArrayList<String>();
        Matcher m = tokenizerPattern.matcher(query);
        while (m.find()) qe.add(m.group(1));

        HashSet<String> constraints_positive = new HashSet<>();
        HashSet<String> constraints_negative = new HashSet<>();
        for (String t: qe) {
            if (t.startsWith("/")) constraints_positive.add(t.substring(1));
            if (t.startsWith("-/")) constraints_negative.add(t.substring(2));
        }
        Timeline tl1 = new Timeline();
        messageloop: for (MessageEntry message: tl0) {
            // simple existence constraints: the message must (or must not) carry the attribute
            if (constraints_positive.contains("image") && message.getImages().size() == 0) continue;
            if (constraints_negative.contains("image") && message.getImages().size() != 0) continue;
            if (constraints_positive.contains("place") && message.getPlaceName().length() == 0) continue;
            if (constraints_negative.contains("place") && message.getPlaceName().length() != 0) continue;
            if (constraints_positive.contains("location") && message.getLocationPoint() == null) continue;
            if (constraints_negative.contains("location") && message.getLocationPoint() != null) continue;
            if (constraints_positive.contains("link") && message.getLinks().length == 0) continue;
            if (constraints_negative.contains("link") && message.getLinks().length != 0) continue;
            if (constraints_positive.contains("mention") && message.getMentions().length == 0) continue;
            if (constraints_negative.contains("mention") && message.getMentions().length != 0) continue;
            if (constraints_positive.contains("hashtag") && message.getHashtags().length == 0) continue;
            if (constraints_negative.contains("hashtag") && message.getHashtags().length != 0) continue;
            // special treatment of location and link constraint
            constraintCheck: for (String cs: constraints_positive) {
                if (cs.startsWith(Constraint.location.name() + "=")) {
                    // /location=lon-west,lat-south,lon-east,lat-north bounding box check
                    if (message.getLocationPoint() == null) continue messageloop;
                    String params = cs.substring(Constraint.location.name().length() + 1);
                    String[] coord = params.split(",");
                    if (coord.length == 4) {
                        double lon = message.getLocationPoint()[0];
                        double lon_west = Double.parseDouble(coord[0]);
                        double lon_east = Double.parseDouble(coord[2]);
                        if (lon < lon_west || lon > lon_east) continue messageloop;
                        double lat = message.getLocationPoint()[1];
                        double lat_south = Double.parseDouble(coord[1]);
                        double lat_north = Double.parseDouble(coord[3]);
                        if (lat < lat_south || lat > lat_north) continue messageloop;
                    }
                }
                if (cs.startsWith(Constraint.link.name() + "=")) {
                    // /link=<regexp>: at least one link must match the regexp
                    if (message.getLinks().length == 0) continue messageloop;
                    Pattern regex = Pattern.compile(cs.substring(Constraint.link.name().length() + 1));
                    for (String link: message.getLinks()) {
                        if (regex.matcher(link).matches()) continue constraintCheck;
                    }
                    // no match
                    continue messageloop;
                }
            }
            tl1.addTweet(message);
        }
        return tl1;
    }

    public static class ElasticsearchQuery {

        // the elasticsearch query tree built from the user's query string
        QueryBuilder queryBuilder;
        // time interval selected with the since:/until: modifiers; defaults span all time
        Date since, until;

        /**
         * Parses a twitter-like search query string into an elasticsearch query.
         * @param q the query string exactly as the user typed it
         * @param timezoneOffset the user's timezone offset to interpret since:/until: dates
         */
        public ElasticsearchQuery(String q, int timezoneOffset) {
            // default values for since and until
            this.since = new Date(0);
            this.until = new Date(Long.MAX_VALUE);
            // parse the query
            this.queryBuilder = parse(q, timezoneOffset);
        }

        private QueryBuilder parse(String q, int timezoneOffset) {
            // detect usage of OR junctor usage. Right now we cannot have mixed AND and OR usage. That's a hack right now
            q = q.replaceAll(" AND ", " "); // AND is default
            boolean ORjunctor = q.indexOf(" OR ") >= 0;
            q = q.replaceAll(" OR ", " "); // if we know that all terms are OR, we remove that and apply it later
            // tokenize the query
            List<String> qe = new ArrayList<String>();
            Matcher m = tokenizerPattern.matcher(q);
            while (m.find()) qe.add(m.group(1));
            // twitter search syntax:
            //   term1 term2 term3 - all three terms shall appear
            //   "term1 term2 term3" - exact match of all terms
            //   term1 OR term2 OR term3 - any of the three terms shall appear
            //   from:user - tweets posted from that user
            //   to:user - tweets posted to that user
            //   @user - tweets which mention that user
            //   near:"location" within:xmi - tweets that are near that location
            //   #hashtag - tweets containing the given hashtag
            //   since:2015-04-01 until:2015-04-03 - tweets within given time range
            // additional constraints:
            //   /image /audio /video /place - restrict to tweets which have attached images, audio, video or place
            ArrayList<String> text_positive_match = new ArrayList<>();
            ArrayList<String> text_negative_match = new ArrayList<>();
            ArrayList<String> text_positive_filter = new ArrayList<>();
            ArrayList<String> text_negative_filter = new ArrayList<>();
            ArrayList<String> users_positive = new ArrayList<>();
            ArrayList<String> users_negative = new ArrayList<>();
            ArrayList<String> hashtags_positive = new ArrayList<>();
            ArrayList<String> hashtags_negative = new ArrayList<>();
            HashMap<String, String> modifier = new HashMap<>();
            HashSet<String> constraints_positive = new HashSet<>();
            HashSet<String> constraints_negative = new HashSet<>();
            for (String t: qe) {
                if (t.length() == 0) continue;
                if (t.startsWith("@")) {
                    users_positive.add(t.substring(1));
                    continue;
                } else if (t.startsWith("-@")) {
                    users_negative.add(t.substring(2));
                    continue;
                } else if (t.startsWith("#")) {
                    hashtags_positive.add(t.substring(1));
                    continue;
                } else if (t.startsWith("-#")) {
                    hashtags_negative.add(t.substring(2));
                    continue;
                } else if (t.startsWith("/")) {
                    constraints_positive.add(t.substring(1));
                    continue;
                } else if (t.startsWith("-/")) {
                    constraints_negative.add(t.substring(2));
                    continue;
                } else if (t.indexOf(':') > 0) {
                    int p = t.indexOf(':');
                    modifier.put(t.substring(0, p).toLowerCase(), t.substring(p + 1));
                    continue;
                } else {
                    // patch characters that will confuse elasticsearch or have a different meaning
                    boolean negative = t.startsWith("-");
                    if (negative) t = t.substring(1);
                    if (t.length() == 0) continue;
                    if ((t.charAt(0) == '"' && t.charAt(t.length() - 1) == '"') || (t.charAt(0) == '\'' && t.charAt(t.length() - 1) == '\'')) {
                        t = t.substring(1, t.length() - 1);
                        if (negative) text_negative_filter.add(t); else text_positive_filter.add(t);
                    } else if (t.indexOf('-') > 0) {
                        // this must be handled like a quoted string without the minus
                        t = t.replaceAll("-", " ");
                        if (negative) text_negative_filter.add(t); else text_positive_filter.add(t);
                    } else {
                        if (negative) text_negative_match.add(t); else text_positive_match.add(t);
                    }
                    continue;
                }
            }
            if (modifier.containsKey("to")) users_positive.add(modifier.get("to"));
            // compose query for text
            BoolQueryBuilder bquery = QueryBuilders.boolQuery();
            for (String text: text_positive_match) {
                if (ORjunctor)
                    bquery.should(QueryBuilders.matchQuery("text", text));
                else
                    bquery.must(QueryBuilders.matchQuery("text", text));
            }
            for (String text: text_negative_match) {
                // negation of terms in disjunctions would cause to retrieve almost all documents
                // this cannot be the requirement of the user. It may be valid in conjunctions, but not in disjunctions
                bquery.mustNot(QueryBuilders.matchQuery("text", text));
            }
            // apply modifiers
            if (modifier.containsKey("id")) bquery.must(QueryBuilders.termQuery("id_str", modifier.get("id")));
            if (modifier.containsKey("-id")) bquery.mustNot(QueryBuilders.termQuery("id_str", modifier.get("-id")));
            if (modifier.containsKey("from")) {
                // generalization: from: accepts a single screen name or a comma-separated list,
                // i.e. from:usera,userb selects tweets from usera and from userb (disjunction)
                String screen_name = modifier.get("from");
                if (screen_name.indexOf(',') < 0) {
                    bquery.must(QueryBuilders.termQuery("screen_name", screen_name));
                } else {
                    String[] screen_names = screen_name.split(",");
                    BoolQueryBuilder disjunction = QueryBuilders.boolQuery();
                    for (String name: screen_names) disjunction.should(QueryBuilders.termQuery("screen_name", name));
                    bquery.must(disjunction);
                }
            }
            if (modifier.containsKey("-from")) bquery.mustNot(QueryBuilders.termQuery("screen_name", modifier.get("-from")));
            if (modifier.containsKey("near")) {
                BoolQueryBuilder nearquery = QueryBuilders.boolQuery()
                    .should(QueryBuilders.matchQuery("place_name", modifier.get("near")))
                    .should(QueryBuilders.matchQuery("text", modifier.get("near")));
                bquery.must(nearquery);
            }
            if (modifier.containsKey("since")) try {
                Calendar since = DateParser.parse(modifier.get("since"), timezoneOffset);
                this.since = since.getTime();
                RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery("created_at").from(this.since);
                if (modifier.containsKey("until")) {
                    Calendar until = DateParser.parse(modifier.get("until"), timezoneOffset);
                    if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                        // until must be the day which is included in results.
                        // To get the result within the same day, we must add one day.
                        until.add(Calendar.DATE, 1);
                    }
                    this.until = until.getTime();
                    rangeQuery.to(this.until);
                } else {
                    this.until = new Date(Long.MAX_VALUE);
                }
                bquery.must(rangeQuery);
            } catch (ParseException e) {} else if (modifier.containsKey("until")) try {
                Calendar until = DateParser.parse(modifier.get("until"), timezoneOffset);
                if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                    // until must be the day which is included in results.
                    // To get the result within the same day, we must add one day.
                    until.add(Calendar.DATE, 1);
                }
                this.until = until.getTime();
                RangeQueryBuilder rangeQuery = QueryBuilders.rangeQuery("created_at").to(this.until);
                bquery.must(rangeQuery);
            } catch (ParseException e) {} // NOTE: an unparsable date silently drops the since/until modifier
            // apply constraints as filters
            QueryBuilder cquery = bquery;
            for (String text: text_positive_filter) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.termsFilter("text", text));
            for (String text: text_negative_filter) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.termsFilter("text", text)));
            for (String user: users_positive) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.inFilter("mentions", user));
            for (String user: users_negative) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.inFilter("mentions", user)));
            for (String hashtag: hashtags_positive) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.inFilter("hashtags", hashtag.toLowerCase()));
            for (String hashtag: hashtags_negative) cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.inFilter("hashtags", hashtag.toLowerCase())));
            for (Constraint c: Constraint.values()) {
                if (constraints_positive.contains(c.name())) {
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(c.field_name));
                }
                if (constraints_negative.contains(c.name())) {
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.notFilter(FilterBuilders.existsFilter(c.field_name)));
                }
            }
            // special treatment of location constraints of the form /location=lon-west,lat-south,lon-east,lat-north i.e. /location=8.58,50.178,8.59,50.181
            for (String cs: constraints_positive) {
                if (cs.startsWith(Constraint.location.name() + "=")) {
                    String params = cs.substring(Constraint.location.name().length() + 1);
                    String[] coord = params.split(",");
                    if (coord.length == 4) {
                        double lon_west = Double.parseDouble(coord[0]);
                        double lat_south = Double.parseDouble(coord[1]);
                        double lon_east = Double.parseDouble(coord[2]);
                        double lat_north = Double.parseDouble(coord[3]);
                        cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(Constraint.location.field_name));
                        cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.geoBoundingBoxFilter("location_point")
                            .topLeft(lat_north, lon_west)
                            .bottomRight(lat_south, lon_east));
                    }
                }
                if (cs.startsWith(Constraint.link.name() + "=")) {
                    String regexp = cs.substring(Constraint.link.name().length() + 1);
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.existsFilter(Constraint.link.field_name));
                    cquery = QueryBuilders.filteredQuery(cquery, FilterBuilders.regexpFilter(Constraint.link.field_name, regexp));
                }
            }
            return cquery;
        }
    }
}
|
modified the from: - modifier in such a way that it is able to select
for several users, which must be given in a comma-separated list.
I.e. from:usera,userb will select all tweets from user usera and user
userb
|
src/org/loklak/data/QueryEntry.java
|
modified the from: - modifier in such a way that it is able to select for several users, which must be given in a comma-separated list. I.e. from:usera,userb will select all tweets from user usera and user userb
|
|
Java
|
unlicense
|
8451013d2fc6128f3126e97fda74f17d0e3f15ed
| 0
|
mashariqk/excel2tab
|
package com.excel2tab.util;
import java.io.File;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Properties;
import org.apache.poi.ss.usermodel.DateUtil;
import org.apache.poi.xssf.usermodel.XSSFCell;
import org.apache.poi.xssf.usermodel.XSSFRow;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
public class ConvertToTab {

    /**
     * Reads the excel workbook configured by the "inputExcel" key in
     * project.properties and prints every cell of the first sheet to the
     * console, tagged with its POI cell type and [row,col] position.
     *
     * @param args unused
     * @throws Exception if the workbook cannot be opened or read
     */
    public static void main (String[] args) throws Exception {
        Properties props = new Properties();
        File excelFile = null;
        try {
            // project.properties is expected on the classpath
            InputStream resourceStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("project.properties");
            props.load(resourceStream);
        } catch (Exception e) {
            e.printStackTrace();
        }
        try {
            excelFile = new File(props.getProperty("inputExcel"));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // try-with-resources: the workbook holds an open file handle and must be closed
        try (XSSFWorkbook wb = new XSSFWorkbook(excelFile)) {
            XSSFSheet sheet = wb.getSheetAt(0);
            int rowsCount = sheet.getLastRowNum();
            System.out.println("rowsCount is " + rowsCount);
            for (int i = 0; i <= rowsCount; i++) {
                XSSFRow row = sheet.getRow(i);
                if (row == null) continue; // getRow returns null for rows without any cell
                int colCounts = row.getLastCellNum();
                System.out.println("Total Number of Cols: " + colCounts);
                for (int j = 0; j < colCounts; j++) {
                    XSSFCell cell = row.getCell(j);
                    if (cell != null) {
                        printCell(cell, i, j);
                    }
                }
            }
        }
    }

    /** Prints a single cell's value to the console, dispatching on the POI cell type. */
    private static void printCell(XSSFCell cell, int i, int j) {
        int cellType = cell.getCellType();
        if (XSSFCell.CELL_TYPE_BLANK == cellType) {
            System.out.println("CELL_TYPE_BLANK[" + i + "," + j + "]= ''");
        } else if (XSSFCell.CELL_TYPE_BOOLEAN == cellType) {
            System.out.println("CELL_TYPE_BOOLEAN[" + i + "," + j + "]=" + cell.getBooleanCellValue());
        } else if (XSSFCell.CELL_TYPE_ERROR == cellType) {
            System.out.println("CELL_TYPE_ERROR[" + i + "," + j + "]=" + cell.getErrorCellString());
        } else if (XSSFCell.CELL_TYPE_FORMULA == cellType) {
            System.out.println("CELL_TYPE_FORMULA[" + i + "," + j + "]=" + cell.getCellFormula());
        } else if (XSSFCell.CELL_TYPE_NUMERIC == cellType) {
            // date cells are stored as numbers; render them as MM/dd/yyyy instead of the raw serial value
            if (DateUtil.isCellDateFormatted(cell)) {
                SimpleDateFormat sdf = new SimpleDateFormat("MM/dd/yyyy");
                System.out.println("CELL_TYPE_NUMERIC[" + i + "," + j + "]=" + sdf.format(cell.getDateCellValue()));
            } else {
                System.out.println("CELL_TYPE_NUMERIC[" + i + "," + j + "]=" + cell.getNumericCellValue());
            }
        } else {
            // string (and remaining) cell types: DateUtil.isCellDateFormatted throws
            // IllegalStateException on non-numeric cells, so read the string value directly
            System.out.println("CELL_TYPE_STRING[" + i + "," + j + "]=" + cell.getStringCellValue());
        }
    }
}
|
src/main/java/com/excel2tab/util/ConvertToTab.java
|
package com.excel2tab.util;
import java.io.File;
import java.io.InputStream;
import java.util.Properties;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
public class ConvertToTab {

    /**
     * Loads project.properties, opens the workbook named by the
     * {@code inputExcel} property and grabs its first sheet.
     *
     * @param args unused
     * @throws Exception if the workbook cannot be opened
     */
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try {
            InputStream resourceStream = Thread.currentThread().getContextClassLoader()
                    .getResourceAsStream("project.properties");
            props.load(resourceStream);
        } catch (Exception e) {
            e.printStackTrace();
        }
        File excelFile = null;
        try {
            excelFile = new File(props.getProperty("inputExcel"));
        } catch (Exception e) {
            e.printStackTrace();
        }
        // try-with-resources: the original never closed the workbook, leaking
        // the underlying file handle.
        try (XSSFWorkbook wb = new XSSFWorkbook(excelFile)) {
            XSSFSheet sheet = wb.getSheetAt(0);
        }
    }
}
|
dev commit
|
src/main/java/com/excel2tab/util/ConvertToTab.java
|
dev commit
|
|
Java
|
apache-2.0
|
462488624022a1cff16bebed6b3bb08b17ca80ab
| 0
|
mrjoel/gitblit,1234-/gitblit,fzs/gitblit,vitalif/gitblit,korealerts1/gitblit,two-ack/gitblit,RainerW/gitblit,lucamilanesio/gitblit,firateren52/gitblit,yonglehou/gitblit,hstonel/gitblit,BullShark/IRCBlit,yonglehou/gitblit,ahnjune881214/gitblit,davideuler/gitblit,davideuler/gitblit,firateren52/gitblit,pdinc-oss/gitblit,dispositional/gitblit,wellington-junio/gitblit,ahnjune881214/gitblit,dispositional/gitblit,lucamilanesio/gitblit,RainerW/gitblit,fuero/gitblit,hstonel/gitblit,paulsputer/gitblit,paladox/gitblit,gitblit/gitblit,dispositional/gitblit,heavenlyhash/gitblit,korealerts1/gitblit,gzsombor/gitblit,mrjoel/gitblit,paladox/gitblit,gzsombor/gitblit,lucamilanesio/gitblit,mystygage/gitblit,fzs/gitblit,BullShark/IRCBlit,vitalif/gitblit,paladox/gitblit,lucamilanesio/gitblit,gitblit/gitblit,cesarmarinhorj/gitblit,davideuler/gitblit,mystygage/gitblit,ahnjune881214/gitblit,two-ack/gitblit,RainerW/gitblit,lucamilanesio/gitblit,gitblit/gitblit,cesarmarinhorj/gitblit,gitblit/gitblit,culmat/gitblit,heavenlyhash/gitblit,ahnjune881214/gitblit,firateren52/gitblit,hstonel/gitblit,1234-/gitblit,1234-/gitblit,fzs/gitblit,RainerW/gitblit,paulsputer/gitblit,wellington-junio/gitblit,fzs/gitblit,ahnjune881214/gitblit,mystygage/gitblit,hstonel/gitblit,yonglehou/gitblit,fuero/gitblit,cesarmarinhorj/gitblit,gzsombor/gitblit,paulsputer/gitblit,firateren52/gitblit,fuero/gitblit,mrjoel/gitblit,Distrotech/gitblit,BullShark/IRCBlit,heavenlyhash/gitblit,dispositional/gitblit,pombreda/gitblit,culmat/gitblit,firateren52/gitblit,korealerts1/gitblit,pombreda/gitblit,paladox/gitblit,Distrotech/gitblit,cesarmarinhorj/gitblit,two-ack/gitblit,pdinc-oss/gitblit,1234-/gitblit,vitalif/gitblit,yonglehou/gitblit,paladox/gitblit,gitblit/gitblit,paulsputer/gitblit,davideuler/gitblit,dispositional/gitblit,vitalif/gitblit,wellington-junio/gitblit,two-ack/gitblit,vitalif/gitblit,paulsputer/gitblit,korealerts1/gitblit,two-ack/gitblit,culmat/gitblit,Distrotech/gitblit,mystygage/gitblit,wellington-junio/gitblit,Rain
erW/gitblit,pombreda/gitblit,mrjoel/gitblit,hstonel/gitblit,davideuler/gitblit,pombreda/gitblit,gzsombor/gitblit,pombreda/gitblit,Distrotech/gitblit,BullShark/IRCBlit,fzs/gitblit,1234-/gitblit,mrjoel/gitblit,fuero/gitblit,mystygage/gitblit,wellington-junio/gitblit,korealerts1/gitblit,pdinc-oss/gitblit,cesarmarinhorj/gitblit,fuero/gitblit,yonglehou/gitblit
|
/*
* Copyright 2012 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit;
import static org.eclipse.jgit.treewalk.filter.TreeFilter.ANY_DIFF;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.document.DateTools.Resolution;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.treewalk.EmptyTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.util.FS;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.Constants.SearchObjectType;
import com.gitblit.models.IssueModel;
import com.gitblit.models.IssueModel.Attachment;
import com.gitblit.models.PathModel.PathChangeModel;
import com.gitblit.models.RefModel;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.SearchResult;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.IssueUtils;
import com.gitblit.utils.JGitUtils;
import com.gitblit.utils.StringUtils;
/**
* The Lucene executor handles indexing and searching repositories.
*
* @author James Moger
*
*/
public class LuceneExecutor implements Runnable {
private static final int INDEX_VERSION = 2;
private static final String FIELD_OBJECT_TYPE = "type";
private static final String FIELD_ISSUE = "issue";
private static final String FIELD_PATH = "path";
private static final String FIELD_COMMIT = "commit";
private static final String FIELD_BRANCH = "branch";
private static final String FIELD_SUMMARY = "summary";
private static final String FIELD_CONTENT = "content";
private static final String FIELD_AUTHOR = "author";
private static final String FIELD_COMMITTER = "committer";
private static final String FIELD_DATE = "date";
private static final String FIELD_TAG = "tag";
private static final String FIELD_LABEL = "label";
private static final String FIELD_ATTACHMENT = "attachment";
private static final String CONF_FILE = "lucene.conf";
private static final String LUCENE_DIR = "lucene";
private static final String CONF_INDEX = "index";
private static final String CONF_VERSION = "version";
private static final String CONF_ALIAS = "aliases";
private static final String CONF_BRANCH = "branches";
private static final Version LUCENE_VERSION = Version.LUCENE_35;
private final Logger logger = LoggerFactory.getLogger(LuceneExecutor.class);
private final IStoredSettings storedSettings;
private final File repositoriesFolder;
private final Map<String, IndexSearcher> searchers = new ConcurrentHashMap<String, IndexSearcher>();
private final Map<String, IndexWriter> writers = new ConcurrentHashMap<String, IndexWriter>();
private final String luceneIgnoreExtensions = "7z arc arj bin bmp dll doc docx exe gif gz jar jpg lib lzh odg odf odt pdf ppt png so swf xcf xls xlsx zip";
private Set<String> excludedExtensions;
public LuceneExecutor(IStoredSettings settings, File repositoriesFolder) {
this.storedSettings = settings;
this.repositoriesFolder = repositoriesFolder;
String exts = luceneIgnoreExtensions;
if (settings != null) {
exts = settings.getString(Keys.web.luceneIgnoreExtensions, exts);
}
excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(exts));
}
/**
* Run is executed by the Gitblit executor service. Because this is called
* by an executor service, calls will queue - i.e. there can never be
* concurrent execution of repository index updates.
*/
@Override
public void run() {
// reload the excluded extensions
String exts = storedSettings.getString(Keys.web.luceneIgnoreExtensions, luceneIgnoreExtensions);
excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(exts));
for (String repositoryName: GitBlit.self().getRepositoryList()) {
RepositoryModel model = GitBlit.self().getRepositoryModel(repositoryName);
if (model.hasCommits && !ArrayUtils.isEmpty(model.indexedBranches)) {
Repository repository = GitBlit.self().getRepository(model.name);
index(model, repository);
repository.close();
System.gc();
}
}
}
/**
* Synchronously indexes a repository. This may build a complete index of a
* repository or it may update an existing index.
*
* @param name
* the name of the repository
* @param repository
* the repository object
*/
private void index(RepositoryModel model, Repository repository) {
try {
if (shouldReindex(repository)) {
// (re)build the entire index
IndexResult result = reindex(model, repository);
if (result.success) {
if (result.commitCount > 0) {
String msg = "Built {0} Lucene index from {1} commits and {2} files across {3} branches in {4} secs";
logger.info(MessageFormat.format(msg, model.name, result.commitCount,
result.blobCount, result.branchCount, result.duration()));
}
} else {
String msg = "Could not build {0} Lucene index!";
logger.error(MessageFormat.format(msg, model.name));
}
} else {
// update the index with latest commits
IndexResult result = updateIndex(model, repository);
if (result.success) {
if (result.commitCount > 0) {
String msg = "Updated {0} Lucene index with {1} commits and {2} files across {3} branches in {4} secs";
logger.info(MessageFormat.format(msg, model.name, result.commitCount,
result.blobCount, result.branchCount, result.duration()));
}
} else {
String msg = "Could not update {0} Lucene index!";
logger.error(MessageFormat.format(msg, model.name));
}
}
} catch (Throwable t) {
logger.error(MessageFormat.format("Lucene indexing failure for {0}", model.name), t);
}
}
/**
* Close the writer/searcher objects for a repository.
*
* @param repositoryName
*/
public synchronized void close(String repositoryName) {
try {
IndexSearcher searcher = searchers.remove(repositoryName);
if (searcher != null) {
searcher.getIndexReader().close();
}
} catch (Exception e) {
logger.error("Failed to close index searcher for " + repositoryName, e);
}
try {
IndexWriter writer = writers.remove(repositoryName);
if (writer != null) {
writer.close();
}
} catch (Exception e) {
logger.error("Failed to close index writer for " + repositoryName, e);
}
}
/**
* Close all Lucene indexers.
*
*/
public synchronized void close() {
// close all writers
for (String writer : writers.keySet()) {
try {
writers.get(writer).close(true);
} catch (Throwable t) {
logger.error("Failed to close Lucene writer for " + writer, t);
}
}
writers.clear();
// close all searchers
for (String searcher : searchers.keySet()) {
try {
searchers.get(searcher).getIndexReader().close();
} catch (Throwable t) {
logger.error("Failed to close Lucene searcher for " + searcher, t);
}
}
searchers.clear();
}
/**
* Deletes the Lucene index for the specified repository.
*
* @param repositoryName
* @return true, if successful
*/
public boolean deleteIndex(String repositoryName) {
try {
// close any open writer/searcher
close(repositoryName);
// delete the index folder
File repositoryFolder = new File(repositoriesFolder, repositoryName);
File luceneIndex = new File(repositoryFolder, LUCENE_DIR);
if (luceneIndex.exists()) {
org.eclipse.jgit.util.FileUtils.delete(luceneIndex,
org.eclipse.jgit.util.FileUtils.RECURSIVE);
}
// delete the config file
File luceneConfig = new File(repositoryFolder, CONF_FILE);
if (luceneConfig.exists()) {
luceneConfig.delete();
}
return true;
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Returns the author for the commit, if this information is available.
*
* @param commit
* @return an author or unknown
*/
private String getAuthor(RevCommit commit) {
String name = "unknown";
try {
name = commit.getAuthorIdent().getName();
if (StringUtils.isEmpty(name)) {
name = commit.getAuthorIdent().getEmailAddress();
}
} catch (NullPointerException n) {
}
return name;
}
/**
* Returns the committer for the commit, if this information is available.
*
* @param commit
* @return an committer or unknown
*/
private String getCommitter(RevCommit commit) {
String name = "unknown";
try {
name = commit.getCommitterIdent().getName();
if (StringUtils.isEmpty(name)) {
name = commit.getCommitterIdent().getEmailAddress();
}
} catch (NullPointerException n) {
}
return name;
}
/**
* Get the tree associated with the given commit.
*
* @param walk
* @param commit
* @return tree
* @throws IOException
*/
private RevTree getTree(final RevWalk walk, final RevCommit commit)
throws IOException {
final RevTree tree = commit.getTree();
if (tree != null) {
return tree;
}
walk.parseHeaders(commit);
return commit.getTree();
}
    /**
     * Construct a keyname from the branch.
     *
     * The branch name is hashed (SHA-1) into a fixed-length token, presumably
     * because raw ref names (with '/') make awkward config key names - the
     * hashed key is later resolved back via the CONF_ALIAS section.
     *
     * @param branchName
     * @return a keyname appropriate for the Git config file format
     */
    private String getBranchKey(String branchName) {
        return StringUtils.getSHA1(branchName);
    }
/**
* Returns the Lucene configuration for the specified repository.
*
* @param repository
* @return a config object
*/
private FileBasedConfig getConfig(Repository repository) {
File file = new File(repository.getDirectory(), CONF_FILE);
FileBasedConfig config = new FileBasedConfig(file, FS.detect());
return config;
}
/**
* Reads the Lucene config file for the repository to check the index
* version. If the index version is different, then rebuild the repository
* index.
*
* @param repository
* @return true of the on-disk index format is different than INDEX_VERSION
*/
private boolean shouldReindex(Repository repository) {
try {
FileBasedConfig config = getConfig(repository);
config.load();
int indexVersion = config.getInt(CONF_INDEX, CONF_VERSION, 0);
// reindex if versions do not match
return indexVersion != INDEX_VERSION;
} catch (Throwable t) {
}
return true;
}
    /**
     * This completely indexes the repository and will destroy any existing
     * index.
     *
     * Walks every indexed branch: records the branch alias/tip in lucene.conf,
     * indexes each blob reachable from the tip (attributed to the commit that
     * last touched it), indexes the tip and all ancestor commits, and finally
     * indexes gb-issues if present.
     *
     * @param model the repository model (name + indexed branch list)
     * @param repository the repository object
     * @return IndexResult with counts of commits, blobs, branches and issues
     */
    public IndexResult reindex(RepositoryModel model, Repository repository) {
        IndexResult result = new IndexResult();
        // wipe any existing index; bail out if the old index cannot be removed
        if (!deleteIndex(model.name)) {
            return result;
        }
        try {
            FileBasedConfig config = getConfig(repository);
            Set<String> indexedCommits = new TreeSet<String>();
            IndexWriter writer = getIndexWriter(model.name);
            // build a quick lookup of tags
            Map<String, List<String>> tags = new HashMap<String, List<String>>();
            for (RefModel tag : JGitUtils.getTags(repository, false, -1)) {
                if (!tag.isAnnotatedTag()) {
                    // skip non-annotated tags
                    continue;
                }
                // NOTE(review): containsKey is passed an ObjectId while the map
                // is keyed by String (getReferencedObjectId().getName()), so
                // this check never matches and each tag replaces any earlier
                // list for the same commit - confirm and fix.
                if (!tags.containsKey(tag.getObjectId())) {
                    tags.put(tag.getReferencedObjectId().getName(), new ArrayList<String>());
                }
                tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName);
            }
            ObjectReader reader = repository.newObjectReader();
            // get the local branches
            List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);
            // sort them by most recently updated
            Collections.sort(branches, new Comparator<RefModel>() {
                @Override
                public int compare(RefModel ref1, RefModel ref2) {
                    return ref2.getDate().compareTo(ref1.getDate());
                }
            });
            // reorder default branch to first position
            RefModel defaultBranch = null;
            ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository);
            for (RefModel branch : branches) {
                if (branch.getObjectId().equals(defaultBranchId)) {
                    defaultBranch = branch;
                    break;
                }
            }
            // NOTE(review): if no branch matched, defaultBranch stays null and
            // add(0, null) inserts a null element that would NPE in the loop
            // below - confirm whether getDefaultBranch can miss.
            branches.remove(defaultBranch);
            branches.add(0, defaultBranch);
            // walk through each branch
            for (RefModel branch : branches) {
                // if this branch is not specifically indexed then skip
                if (!model.indexedBranches.contains(branch.getName())) {
                    continue;
                }
                String branchName = branch.getName();
                RevWalk revWalk = new RevWalk(reader);
                RevCommit tip = revWalk.parseCommit(branch.getObjectId());
                String tipId = tip.getId().getName();
                // record alias/tip so updateIndex can later do increments
                String keyName = getBranchKey(branchName);
                config.setString(CONF_ALIAS, null, keyName, branchName);
                config.setString(CONF_BRANCH, null, keyName, tipId);
                // index the blob contents of the tree
                TreeWalk treeWalk = new TreeWalk(repository);
                treeWalk.addTree(tip.getTree());
                treeWalk.setRecursive(true);
                Map<String, ObjectId> paths = new TreeMap<String, ObjectId>();
                while (treeWalk.next()) {
                    paths.put(treeWalk.getPathString(), treeWalk.getObjectId(0));
                }
                ByteArrayOutputStream os = new ByteArrayOutputStream();
                byte[] tmp = new byte[32767];
                RevWalk commitWalk = new RevWalk(reader);
                commitWalk.markStart(tip);
                RevCommit commit;
                // walk history until every tip path has been attributed to the
                // commit that last touched it
                while ((paths.size() > 0) && (commit = commitWalk.next()) != null) {
                    TreeWalk diffWalk = new TreeWalk(reader);
                    int parentCount = commit.getParentCount();
                    switch (parentCount) {
                    case 0:
                        diffWalk.addTree(new EmptyTreeIterator());
                        break;
                    case 1:
                        diffWalk.addTree(getTree(commitWalk, commit.getParent(0)));
                        break;
                    default:
                        // skip merge commits
                        continue;
                    }
                    diffWalk.addTree(getTree(commitWalk, commit));
                    diffWalk.setFilter(ANY_DIFF);
                    diffWalk.setRecursive(true);
                    while ((paths.size() > 0) && diffWalk.next()) {
                        String path = diffWalk.getPathString();
                        if (!paths.containsKey(path)) {
                            continue;
                        }
                        // remove path from set
                        ObjectId blobId = paths.remove(path);
                        result.blobCount++;
                        // index the blob metadata
                        String blobAuthor = getAuthor(commit);
                        String blobCommitter = getCommitter(commit);
                        String blobDate = DateTools.timeToString(commit.getCommitTime() * 1000L,
                                Resolution.MINUTE);
                        Document doc = new Document();
                        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
                        doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_PATH, path, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_DATE, blobDate, Store.YES, Index.NO));
                        doc.add(new Field(FIELD_AUTHOR, blobAuthor, Store.YES, Index.ANALYZED));
                        doc.add(new Field(FIELD_COMMITTER, blobCommitter, Store.YES, Index.ANALYZED));
                        // determine extension to compare to the extension
                        // blacklist
                        String ext = null;
                        String name = path.toLowerCase();
                        if (name.indexOf('.') > -1) {
                            ext = name.substring(name.lastIndexOf('.') + 1);
                        }
                        // index the blob content
                        if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) {
                            ObjectLoader ldr = repository.open(blobId, Constants.OBJ_BLOB);
                            InputStream in = ldr.openStream();
                            int n;
                            while ((n = in.read(tmp)) > 0) {
                                os.write(tmp, 0, n);
                            }
                            in.close();
                            byte[] content = os.toByteArray();
                            String str = new String(content, Constants.CHARACTER_ENCODING);
                            doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
                            os.reset();
                        }
                        // add the blob to the index
                        writer.addDocument(doc);
                    }
                }
                os.close();
                // index the tip commit object
                if (indexedCommits.add(tipId)) {
                    Document doc = createDocument(tip, tags.get(tipId));
                    doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
                    writer.addDocument(doc);
                    result.commitCount += 1;
                    result.branchCount += 1;
                }
                // traverse the log and index the previous commit objects
                RevWalk historyWalk = new RevWalk(reader);
                historyWalk.markStart(historyWalk.parseCommit(tip.getId()));
                RevCommit rev;
                while ((rev = historyWalk.next()) != null) {
                    String hash = rev.getId().getName();
                    if (indexedCommits.add(hash)) {
                        Document doc = createDocument(rev, tags.get(hash));
                        doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
                        writer.addDocument(doc);
                        result.commitCount += 1;
                    }
                }
            }
            // finished
            reader.release();
            // this repository has a gb-issues branch, index all issues
            if (IssueUtils.getIssuesBranch(repository) != null) {
                List<IssueModel> issues = IssueUtils.getIssues(repository, null);
                if (issues.size() > 0) {
                    result.branchCount += 1;
                }
                for (IssueModel issue : issues) {
                    result.issueCount++;
                    Document doc = createDocument(issue);
                    writer.addDocument(doc);
                }
            }
            // commit all changes and reset the searcher
            config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION);
            config.save();
            writer.commit();
            resetIndexSearcher(model.name);
            result.success();
        } catch (Exception e) {
            logger.error("Exception while reindexing " + model.name, e);
        }
        return result;
    }
    /**
     * Incrementally update the index with the specified commit for the
     * repository.
     *
     * Re-indexes each blob changed by the commit (replacing any previously
     * indexed version), then indexes the commit object itself with its
     * annotated tags.
     *
     * @param repositoryName
     * @param repository
     * @param branch
     *            the fully qualified branch name (e.g. refs/heads/master)
     * @param commit
     * @return IndexResult with counts of commits/blobs indexed; success is
     *         set by the final document write
     */
    private IndexResult index(String repositoryName, Repository repository,
            String branch, RevCommit commit) {
        IndexResult result = new IndexResult();
        try {
            List<PathChangeModel> changedPaths = JGitUtils.getFilesInCommit(repository, commit);
            String revDate = DateTools.timeToString(commit.getCommitTime() * 1000L,
                    Resolution.MINUTE);
            IndexWriter writer = getIndexWriter(repositoryName);
            for (PathChangeModel path : changedPaths) {
                // delete the indexed blob
                // NOTE(review): deletion uses path.name but the FIELD_PATH
                // below is indexed with path.path; if these ever differ the
                // stale document is not removed - confirm they are equivalent.
                deleteBlob(repositoryName, branch, path.name);
                // re-index the blob
                if (!ChangeType.DELETE.equals(path.changeType)) {
                    result.blobCount++;
                    Document doc = new Document();
                    doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES,
                            Index.NOT_ANALYZED));
                    doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_PATH, path.path, Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_DATE, revDate, Store.YES, Index.NO));
                    doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
                    doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));
                    // determine extension to compare to the extension
                    // blacklist
                    String ext = null;
                    String name = path.name.toLowerCase();
                    if (name.indexOf('.') > -1) {
                        ext = name.substring(name.lastIndexOf('.') + 1);
                    }
                    if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) {
                        // read the blob content
                        String str = JGitUtils.getStringContent(repository, commit.getTree(),
                                path.path);
                        doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
                        writer.addDocument(doc);
                    }
                }
            }
            writer.commit();
            // get any annotated commit tags
            List<String> commitTags = new ArrayList<String>();
            for (RefModel ref : JGitUtils.getTags(repository, true, -1)) {
                if (ref.isAnnotatedTag() && ref.getReferencedObjectId().equals(commit.getId())) {
                    commitTags.add(ref.displayName);
                }
            }
            // create and write the Lucene document
            Document doc = createDocument(commit, commitTags);
            doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
            result.commitCount++;
            result.success = index(repositoryName, doc);
        } catch (Exception e) {
            logger.error(MessageFormat.format("Exception while indexing commit {0} in {1}", commit.getId().getName(), repositoryName), e);
        }
        return result;
    }
/**
* Incrementally update the index with the specified issue for the
* repository.
*
* @param repositoryName
* @param issue
* @return true, if successful
*/
public boolean index(String repositoryName, IssueModel issue) {
try {
// delete the old issue from the index, if exists
deleteIssue(repositoryName, issue.id);
Document doc = createDocument(issue);
return index(repositoryName, doc);
} catch (Exception e) {
logger.error(MessageFormat.format("Error while indexing issue {0} in {1}", issue.id, repositoryName), e);
}
return false;
}
/**
* Delete an issue from the repository index.
*
* @param repositoryName
* @param issueId
* @throws Exception
*/
private void deleteIssue(String repositoryName, String issueId) throws Exception {
BooleanQuery query = new BooleanQuery();
Term objectTerm = new Term(FIELD_OBJECT_TYPE, SearchObjectType.issue.name());
query.add(new TermQuery(objectTerm), Occur.MUST);
Term issueidTerm = new Term(FIELD_ISSUE, issueId);
query.add(new TermQuery(issueidTerm), Occur.MUST);
IndexWriter writer = getIndexWriter(repositoryName);
writer.deleteDocuments(query);
writer.commit();
}
/**
* Delete a blob from the specified branch of the repository index.
*
* @param repositoryName
* @param branch
* @param path
* @throws Exception
*/
private void deleteBlob(String repositoryName, String branch, String path) throws Exception {
BooleanQuery query = new BooleanQuery();
Term objectTerm = new Term(FIELD_OBJECT_TYPE, SearchObjectType.blob.name());
query.add(new TermQuery(objectTerm), Occur.MUST);
Term branchTerm = new Term(FIELD_BRANCH, branch);
query.add(new TermQuery(branchTerm), Occur.MUST);
Term pathTerm = new Term(FIELD_PATH, path);
query.add(new TermQuery(pathTerm), Occur.MUST);
IndexWriter writer = getIndexWriter(repositoryName);
writer.deleteDocuments(query);
writer.commit();
}
    /**
     * Updates a repository index incrementally from the last indexed commits.
     *
     * For each indexed branch: replays commits made since the tip recorded in
     * lucene.conf (or all commits for a newly indexed branch), indexes issues
     * from the gb-issues branch, and purges documents of deleted branches.
     *
     * @param model
     * @param repository
     * @return IndexResult
     */
    private IndexResult updateIndex(RepositoryModel model, Repository repository) {
        IndexResult result = new IndexResult();
        try {
            FileBasedConfig config = getConfig(repository);
            config.load();
            // build a quick lookup of annotated tags
            Map<String, List<String>> tags = new HashMap<String, List<String>>();
            for (RefModel tag : JGitUtils.getTags(repository, false, -1)) {
                if (!tag.isAnnotatedTag()) {
                    // skip non-annotated tags
                    continue;
                }
                // NOTE(review): same key-type mismatch as in reindex() -
                // containsKey receives an ObjectId while keys are Strings, so
                // the check never matches - confirm and fix.
                if (!tags.containsKey(tag.getObjectId())) {
                    tags.put(tag.getReferencedObjectId().getName(), new ArrayList<String>());
                }
                tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName);
            }
            // detect branch deletion
            // first assume all branches are deleted and then remove each
            // existing branch from deletedBranches during indexing
            Set<String> deletedBranches = new TreeSet<String>();
            for (String alias : config.getNames(CONF_ALIAS)) {
                String branch = config.getString(CONF_ALIAS, null, alias);
                deletedBranches.add(branch);
            }
            // walk through each branches
            List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);
            for (RefModel branch : branches) {
                String branchName = branch.getName();
                // determine if we should skip this branch
                // NOTE(review): GB_ISSUES is compared against a RefModel here
                // but is used as a String field value in createDocument(issue);
                // if GB_ISSUES is a String this equals() is always false -
                // confirm its type.
                if (!IssueUtils.GB_ISSUES.equals(branch)
                        && !model.indexedBranches.contains(branch.getName())) {
                    continue;
                }
                // remove this branch from the deletedBranches set
                deletedBranches.remove(branchName);
                // determine last commit
                String keyName = getBranchKey(branchName);
                String lastCommit = config.getString(CONF_BRANCH, null, keyName);
                List<RevCommit> revs;
                if (StringUtils.isEmpty(lastCommit)) {
                    // new branch/unindexed branch, get all commits on branch
                    revs = JGitUtils.getRevLog(repository, branchName, 0, -1);
                } else {
                    // pre-existing branch, get changes since last commit
                    revs = JGitUtils.getRevLog(repository, lastCommit, branchName);
                }
                if (revs.size() > 0) {
                    result.branchCount += 1;
                }
                // track the issue ids that we have already indexed
                Set<String> indexedIssues = new TreeSet<String>();
                // reverse the list of commits so we start with the first commit
                Collections.reverse(revs);
                for (RevCommit commit : revs) {
                    if (IssueUtils.GB_ISSUES.equals(branch)) {
                        // only index an issue once during updateIndex
                        String issueId = commit.getShortMessage().substring(2).trim();
                        if (indexedIssues.contains(issueId)) {
                            continue;
                        }
                        indexedIssues.add(issueId);
                        IssueModel issue = IssueUtils.getIssue(repository, issueId);
                        if (issue == null) {
                            // issue was deleted, remove from index
                            deleteIssue(model.name, issueId);
                        } else {
                            // issue was updated
                            index(model.name, issue);
                            result.issueCount++;
                        }
                    } else {
                        // index a commit
                        result.add(index(model.name, repository, branchName, commit));
                    }
                }
                // update the config
                config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION);
                config.setString(CONF_ALIAS, null, keyName, branchName);
                config.setString(CONF_BRANCH, null, keyName, branch.getObjectId().getName());
                config.save();
            }
            // the deletedBranches set will normally be empty by this point
            // unless a branch really was deleted and no longer exists
            if (deletedBranches.size() > 0) {
                for (String branch : deletedBranches) {
                    IndexWriter writer = getIndexWriter(model.name);
                    writer.deleteDocuments(new Term(FIELD_BRANCH, branch));
                    writer.commit();
                }
            }
            result.success = true;
        } catch (Throwable t) {
            logger.error(MessageFormat.format("Exception while updating {0} Lucene index", model.name), t);
        }
        return result;
    }
    /**
     * Creates a Lucene document from an issue.
     *
     * Indexes id, branch (gb-issues), creation date, reporter, attachment
     * names (lowercased and flattened), summary, the issue's full string form
     * as content, and its labels.
     *
     * @param issue
     * @return a Lucene document
     */
    private Document createDocument(IssueModel issue) {
        Document doc = new Document();
        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.issue.name(), Store.YES,
                Field.Index.NOT_ANALYZED));
        doc.add(new Field(FIELD_ISSUE, issue.id, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_BRANCH, IssueUtils.GB_ISSUES, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_DATE, DateTools.dateToString(issue.created, Resolution.MINUTE),
                Store.YES, Field.Index.NO));
        doc.add(new Field(FIELD_AUTHOR, issue.reporter, Store.YES, Index.ANALYZED));
        // flatten attachment names into a single searchable field
        List<String> attachments = new ArrayList<String>();
        for (Attachment attachment : issue.getAttachments()) {
            attachments.add(attachment.name.toLowerCase());
        }
        doc.add(new Field(FIELD_ATTACHMENT, StringUtils.flattenStrings(attachments), Store.YES,
                Index.ANALYZED));
        doc.add(new Field(FIELD_SUMMARY, issue.summary, Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_CONTENT, issue.toString(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_LABEL, StringUtils.flattenStrings(issue.getLabels()), Store.YES,
                Index.ANALYZED));
        return doc;
    }
    /**
     * Creates a Lucene document for a commit
     *
     * Indexes the commit hash, commit time (minute resolution), author,
     * committer, short message as summary, full message as content, and any
     * annotated tag names pointing at the commit.
     *
     * @param commit
     * @param tags annotated tag names for this commit; may be null or empty
     * @return a Lucene document
     */
    private Document createDocument(RevCommit commit, List<String> tags) {
        Document doc = new Document();
        doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.commit.name(), Store.YES,
                Index.NOT_ANALYZED));
        doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_DATE, DateTools.timeToString(commit.getCommitTime() * 1000L,
                Resolution.MINUTE), Store.YES, Index.NO));
        doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_SUMMARY, commit.getShortMessage(), Store.YES, Index.ANALYZED));
        doc.add(new Field(FIELD_CONTENT, commit.getFullMessage(), Store.YES, Index.ANALYZED));
        // only attach a tag field when at least one annotated tag exists
        if (!ArrayUtils.isEmpty(tags)) {
            doc.add(new Field(FIELD_TAG, StringUtils.flattenStrings(tags), Store.YES, Index.ANALYZED));
        }
        return doc;
    }
/**
 * Incrementally adds a single document to the repository's index, commits
 * the change, and invalidates the cached searcher so the document becomes
 * visible to subsequent searches.
 *
 * @param repositoryName the repository whose index receives the document
 * @param doc the document to add
 * @return true if the document was indexed and committed
 */
private boolean index(String repositoryName, Document doc) {
	try {
		IndexWriter writer = getIndexWriter(repositoryName);
		writer.addDocument(doc);
		writer.commit();
		resetIndexSearcher(repositoryName);
		return true;
	} catch (Exception e) {
		logger.error(MessageFormat.format("Exception while incrementally updating {0} Lucene index", repositoryName), e);
		return false;
	}
}
/**
 * Translates a Lucene document hit into a SearchResult object.
 *
 * @param doc the matching Lucene document
 * @param score the relevance score of the hit
 * @param hitId the 1-indexed position of this hit within the result set
 * @param totalHits the total number of hits for the query
 * @return a populated SearchResult
 * @throws ParseException if the stored date field cannot be parsed
 */
private SearchResult createSearchResult(Document doc, float score, int hitId, int totalHits) throws ParseException {
	SearchResult sr = new SearchResult();
	sr.hitId = hitId;
	sr.totalHits = totalHits;
	sr.score = score;
	sr.date = DateTools.stringToDate(doc.get(FIELD_DATE));
	sr.summary = doc.get(FIELD_SUMMARY);
	sr.author = doc.get(FIELD_AUTHOR);
	sr.committer = doc.get(FIELD_COMMITTER);
	sr.type = SearchObjectType.fromName(doc.get(FIELD_OBJECT_TYPE));
	sr.branch = doc.get(FIELD_BRANCH);
	sr.commitId = doc.get(FIELD_COMMIT);
	sr.issueId = doc.get(FIELD_ISSUE);
	sr.path = doc.get(FIELD_PATH);
	// tags and labels are stored as flattened strings; split them back out
	String tagValue = doc.get(FIELD_TAG);
	if (tagValue != null) {
		sr.tags = StringUtils.getStringsFromValue(tagValue);
	}
	String labelValue = doc.get(FIELD_LABEL);
	if (labelValue != null) {
		sr.labels = StringUtils.getStringsFromValue(labelValue);
	}
	return sr;
}
/**
 * Evicts the cached searcher for a repository and closes its reader so
 * the next search opens a fresh view of the index.
 *
 * @param repository the repository whose cached searcher is stale
 * @throws IOException if closing the underlying reader fails
 */
private synchronized void resetIndexSearcher(String repository) throws IOException {
	IndexSearcher stale = searchers.remove(repository);
	if (stale == null) {
		return;
	}
	stale.getIndexReader().close();
}
/**
 * Gets (or lazily creates) the cached index searcher for the repository.
 *
 * Synchronized because the check-then-put on the searcher cache was not
 * atomic: two concurrent callers could each open a reader and one reader
 * would be leaked. The sibling resetIndexSearcher() is already
 * synchronized on the same monitor, so this also closes the race between
 * eviction and creation.
 *
 * @param repository the repository to search
 * @return a searcher over the repository's index
 * @throws IOException if the index cannot be opened
 */
private synchronized IndexSearcher getIndexSearcher(String repository) throws IOException {
	IndexSearcher searcher = searchers.get(repository);
	if (searcher == null) {
		// open a near-real-time reader from the writer (applyAllDeletes=true)
		IndexWriter writer = getIndexWriter(repository);
		searcher = new IndexSearcher(IndexReader.open(writer, true));
		searchers.put(repository, searcher);
	}
	return searcher;
}
/**
 * Gets (or lazily creates) the cached index writer for the repository.
 * The index is created if it does not already exist.
 *
 * Synchronized to make the check-then-put on the writer cache atomic.
 * The Lucene Directory is now only opened when a new writer is actually
 * constructed; the original opened it unconditionally on every call and
 * never closed the unused handle when the writer was already cached.
 *
 * @param repository the repository to index
 * @return an IndexWriter for the repository's index
 * @throws IOException if the index directory cannot be opened
 */
private synchronized IndexWriter getIndexWriter(String repository) throws IOException {
	IndexWriter indexWriter = writers.get(repository);
	if (indexWriter == null) {
		File repositoryFolder = new File(repositoriesFolder, repository);
		File indexFolder = new File(repositoryFolder, LUCENE_DIR);
		if (!indexFolder.exists()) {
			indexFolder.mkdirs();
		}
		Directory directory = FSDirectory.open(indexFolder);
		StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
		IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, analyzer);
		config.setOpenMode(OpenMode.CREATE_OR_APPEND);
		indexWriter = new IndexWriter(directory, config);
		writers.put(repository, indexWriter);
	}
	return indexWriter;
}
/**
 * Searches the specified repositories for the given text or query.
 *
 * @param text the search text; if null or empty, null is returned
 * @param page the 1-indexed page number to retrieve
 * @param pageSize the number of elements to return for this page
 * @param repositories the repositories to search; if none are specified
 *            null is returned
 * @return a list of SearchResults ordered from highest to lowest score
 */
public List<SearchResult> search(String text, int page, int pageSize, List<String> repositories) {
	if (ArrayUtils.isEmpty(repositories)) {
		return null;
	}
	// delegate to the varargs overload
	String[] names = repositories.toArray(new String[repositories.size()]);
	return search(text, page, pageSize, names);
}
/**
 * Searches the specified repositories for the given text or query. The
 * query is applied to both the summary and content fields (leading
 * wildcards allowed). A single-repository search uses that repository's
 * cached searcher; a multi-repository search combines the individual
 * readers into a MultiSourceReader so each hit can be mapped back to its
 * source repository.
 *
 * @param text
 *            if the text is null or empty, null is returned
 * @param page
 *            the page number to retrieve. page is 1-indexed.
 * @param pageSize
 *            the number of elements to return for this page
 * @param repositories
 *            a list of repositories to search. if no repositories are
 *            specified null is returned.
 * @return a list of SearchResults in order from highest to the lowest score
 *
 */
public List<SearchResult> search(String text, int page, int pageSize, String... repositories) {
	if (StringUtils.isEmpty(text)) {
		return null;
	}
	if (ArrayUtils.isEmpty(repositories)) {
		return null;
	}
	// LinkedHashSet preserves score order while de-duplicating results
	Set<SearchResult> results = new LinkedHashSet<SearchResult>();
	StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
	try {
		// default search checks summary and content
		BooleanQuery query = new BooleanQuery();
		QueryParser qp;
		qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
		qp.setAllowLeadingWildcard(true);
		query.add(qp.parse(text), Occur.SHOULD);
		qp = new QueryParser(LUCENE_VERSION, FIELD_CONTENT, analyzer);
		qp.setAllowLeadingWildcard(true);
		query.add(qp.parse(text), Occur.SHOULD);
		IndexSearcher searcher;
		if (repositories.length == 1) {
			// single repository search
			searcher = getIndexSearcher(repositories[0]);
		} else {
			// multiple repository search: combine each repository's reader
			List<IndexReader> readers = new ArrayList<IndexReader>();
			for (String repository : repositories) {
				IndexSearcher repositoryIndex = getIndexSearcher(repository);
				readers.add(repositoryIndex.getIndexReader());
			}
			IndexReader[] rdrs = readers.toArray(new IndexReader[readers.size()]);
			MultiSourceReader reader = new MultiSourceReader(rdrs);
			searcher = new IndexSearcher(reader);
		}
		Query rewrittenQuery = searcher.rewrite(query);
		// collect up to 5000 hits; paging is applied when extracting below
		TopScoreDocCollector collector = TopScoreDocCollector.create(5000, true);
		searcher.search(rewrittenQuery, collector);
		// convert the 1-indexed page into a 0-indexed document offset
		int offset = Math.max(0, (page - 1) * pageSize);
		ScoreDoc[] hits = collector.topDocs(offset, pageSize).scoreDocs;
		int totalHits = collector.getTotalHits();
		for (int i = 0; i < hits.length; i++) {
			int docId = hits[i].doc;
			Document doc = searcher.doc(docId);
			SearchResult result = createSearchResult(doc, hits[i].score, offset + i + 1, totalHits);
			if (repositories.length == 1) {
				// single repository search
				result.repository = repositories[0];
			} else {
				// multi-repository search: resolve which sub-reader (and
				// therefore which repository) the doc id belongs to
				MultiSourceReader reader = (MultiSourceReader) searcher.getIndexReader();
				int index = reader.getSourceIndex(docId);
				result.repository = repositories[index];
			}
			String content = doc.get(FIELD_CONTENT);
			result.fragment = getHighlightedFragment(analyzer, query, content, result);
			results.add(result);
		}
	} catch (Exception e) {
		logger.error(MessageFormat.format("Exception while searching for {0}", text), e);
	}
	return new ArrayList<SearchResult>(results);
}
/**
 * Produces the highlighted HTML fragment(s) for a search result. Matched
 * terms are wrapped in span.highlight elements; blob fragments are
 * rendered as prettyprint pre blocks annotated with the starting line
 * number and (when known) a language class derived from the file
 * extension.
 *
 * @param analyzer the analyzer used for the original query
 * @param query the executed query, used to score/select fragments
 * @param content the stored document content (may be null)
 * @param result the search result the fragment belongs to; its type and
 *            path influence fragment length and formatting
 * @return an HTML snippet, or "" for a blob with no highlightable match
 * @throws IOException
 * @throws InvalidTokenOffsetsException
 */
private String getHighlightedFragment(Analyzer analyzer, Query query,
		String content, SearchResult result) throws IOException, InvalidTokenOffsetsException {
	if (content == null) {
		content = "";
	}
	// commits get longer excerpts than blob matches
	int fragmentLength = SearchObjectType.commit == result.type ? 512 : 150;
	QueryScorer scorer = new QueryScorer(query, "content");
	Fragmenter fragmenter = new SimpleSpanFragmenter(scorer, fragmentLength);
	// use an artificial delimiter for the token so that HTML-escaping the
	// fragment below cannot mangle the highlight markers
	String termTag = "!!--[";
	String termTagEnd = "]--!!";
	SimpleHTMLFormatter formatter = new SimpleHTMLFormatter(termTag, termTagEnd);
	Highlighter highlighter = new Highlighter(formatter, scorer);
	highlighter.setTextFragmenter(fragmenter);
	String [] fragments = highlighter.getBestFragments(analyzer, "content", content, 3);
	if (ArrayUtils.isEmpty(fragments)) {
		if (SearchObjectType.blob == result.type) {
			return "";
		}
		// no highlightable fragment: clip commit message instead
		String fragment = content;
		if (fragment.length() > fragmentLength) {
			fragment = fragment.substring(0, fragmentLength) + "...";
		}
		return "<pre class=\"text\">" + StringUtils.escapeForHtml(fragment, true) + "</pre>";
	}
	// tracks how far into content we have consumed, so repeated identical
	// fragments resolve to successive positions
	int contentPos = 0;
	StringBuilder sb = new StringBuilder();
	for (int i = 0, len = fragments.length; i < len; i++) {
		String fragment = fragments[i];
		String tag = "<pre class=\"text\">";
		// resurrect the raw fragment by removing the artificial delimiters
		String raw = fragment.replace(termTag, "").replace(termTagEnd, "");
		// determine position of the raw fragment in the content
		int pos = content.indexOf(raw, contentPos);
		// restore complete first line of fragment by scanning back to the
		// preceding newline
		int c = pos;
		while (c > 0) {
			c--;
			if (content.charAt(c) == '\n') {
				break;
			}
		}
		if (c > 0) {
			// inject leading chunk of first fragment line
			fragment = content.substring(c + 1, pos) + fragment;
		}
		if (SearchObjectType.blob == result.type) {
			// count lines as offset into the content for this fragment
			int line = Math.max(1, StringUtils.countLines(content.substring(0, pos)));
			// create fragment tag with line number and language
			String lang = "";
			String ext = StringUtils.getFileExtension(result.path).toLowerCase();
			if (!StringUtils.isEmpty(ext)) {
				// maintain leading space!
				lang = " lang-" + ext;
			}
			tag = MessageFormat.format("<pre class=\"prettyprint linenums:{0,number,0}{1}\">", line, lang);
			// update offset into content
			contentPos = pos + raw.length() + 1;
		}
		sb.append(tag);
		// replace the artificial delimiter with html tags
		String html = StringUtils.escapeForHtml(fragment, false);
		html = html.replace(termTag, "<span class=\"highlight\">").replace(termTagEnd, "</span>");
		sb.append(html);
		sb.append("</pre>");
		if (i < len - 1) {
			sb.append("<span class=\"ellipses\">...</span><br/>");
		}
	}
	return sb.toString();
}
/**
* Simple class to track the results of an index update.
*/
private class IndexResult {
long startTime = System.currentTimeMillis();
long endTime = startTime;
boolean success;
int branchCount;
int commitCount;
int blobCount;
int issueCount;
void add(IndexResult result) {
this.branchCount += result.branchCount;
this.commitCount += result.commitCount;
this.blobCount += result.blobCount;
this.issueCount += result.issueCount;
}
void success() {
success = true;
endTime = System.currentTimeMillis();
}
float duration() {
return (endTime - startTime)/1000f;
}
}
/**
 * Custom subclass of MultiReader to identify the source index for a given
 * doc id. This would not be necessary if there was a public method to
 * obtain this information.
 *
 * Must remain a non-static inner class: it uses the enclosing executor's
 * logger.
 */
private class MultiSourceReader extends MultiReader {

	// cached handle to the package-private MultiReader.readerIndex(int)
	// method, obtained reflectively; null if reflection failed
	final Method method;

	MultiSourceReader(IndexReader[] subReaders) {
		super(subReaders);
		Method m = null;
		try {
			// readerIndex(int) maps a composite doc id to the index of the
			// sub-reader that owns it
			m = MultiReader.class.getDeclaredMethod("readerIndex", int.class);
			m.setAccessible(true);
		} catch (Exception e) {
			logger.error("Error getting readerIndex method", e);
		}
		method = m;
	}

	/**
	 * @param docId a doc id within this combined reader
	 * @return the index of the sub-reader containing docId, or -1 if the
	 *         reflective lookup failed
	 */
	int getSourceIndex(int docId) {
		int index = -1;
		try {
			Object o = method.invoke(this, docId);
			index = (Integer) o;
		} catch (Exception e) {
			logger.error("Error getting source index", e);
		}
		return index;
	}
}
}
|
src/com/gitblit/LuceneExecutor.java
|
/*
* Copyright 2012 gitblit.com.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gitblit;
import static org.eclipse.jgit.treewalk.filter.TreeFilter.ANY_DIFF;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.text.MessageFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.DateTools;
import org.apache.lucene.document.DateTools.Resolution;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Fragmenter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.Version;
import org.eclipse.jgit.diff.DiffEntry.ChangeType;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectReader;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.treewalk.EmptyTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.util.FS;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.gitblit.Constants.SearchObjectType;
import com.gitblit.models.IssueModel;
import com.gitblit.models.IssueModel.Attachment;
import com.gitblit.models.PathModel.PathChangeModel;
import com.gitblit.models.RefModel;
import com.gitblit.models.RepositoryModel;
import com.gitblit.models.SearchResult;
import com.gitblit.utils.ArrayUtils;
import com.gitblit.utils.IssueUtils;
import com.gitblit.utils.JGitUtils;
import com.gitblit.utils.StringUtils;
/**
* The Lucene executor handles indexing and searching repositories.
*
* @author James Moger
*
*/
public class LuceneExecutor implements Runnable {
// on-disk index format version; bump to force a complete reindex
private static final int INDEX_VERSION = 2;

// Lucene document field names
private static final String FIELD_OBJECT_TYPE = "type";
private static final String FIELD_ISSUE = "issue";
private static final String FIELD_PATH = "path";
private static final String FIELD_COMMIT = "commit";
private static final String FIELD_BRANCH = "branch";
private static final String FIELD_SUMMARY = "summary";
private static final String FIELD_CONTENT = "content";
private static final String FIELD_AUTHOR = "author";
private static final String FIELD_COMMITTER = "committer";
private static final String FIELD_DATE = "date";
private static final String FIELD_TAG = "tag";
private static final String FIELD_LABEL = "label";
private static final String FIELD_ATTACHMENT = "attachment";

// per-repository Lucene configuration file and its section/key names
private static final String CONF_FILE = "lucene.conf";
private static final String LUCENE_DIR = "lucene";
private static final String CONF_INDEX = "index";
private static final String CONF_VERSION = "version";
private static final String CONF_ALIAS = "aliases";
private static final String CONF_BRANCH = "branches";

private static final Version LUCENE_VERSION = Version.LUCENE_35;

private final Logger logger = LoggerFactory.getLogger(LuceneExecutor.class);

private final IStoredSettings storedSettings;
private final File repositoriesFolder;

// per-repository caches of open searchers and writers
private final Map<String, IndexSearcher> searchers = new ConcurrentHashMap<String, IndexSearcher>();
private final Map<String, IndexWriter> writers = new ConcurrentHashMap<String, IndexWriter>();

// default space-delimited list of file extensions whose blob content is
// not indexed (binary formats); may be overridden via settings in run()
private final String luceneIgnoreExtensions = "7z arc arj bin bmp dll doc docx exe gif gz jar jpg lib lzh odg odf odt pdf ppt png so swf xcf xls xlsx zip";

// extensions excluded from content indexing; refreshed on each run()
private Set<String> excludedExtensions;
/**
 * Creates a Lucene executor.
 *
 * @param settings the stored Gitblit settings
 * @param repositoriesFolder the root folder containing all repositories
 */
public LuceneExecutor(IStoredSettings settings, File repositoriesFolder) {
	this.storedSettings = settings;
	this.repositoriesFolder = repositoriesFolder;
}
/**
 * Run is executed by the Gitblit executor service. Because this is called
 * by an executor service, calls will queue - i.e. there can never be
 * concurrent execution of repository index updates.
 */
@Override
public void run() {
	// reload the excluded extensions so settings changes take effect
	// without a restart
	String exts = storedSettings.getString(Keys.web.luceneIgnoreExtensions, luceneIgnoreExtensions);
	excludedExtensions = new TreeSet<String>(StringUtils.getStringsFromValue(exts));

	// index every repository that has commits and at least one branch
	// configured for indexing
	for (String repositoryName: GitBlit.self().getRepositoryList()) {
		RepositoryModel model = GitBlit.self().getRepositoryModel(repositoryName);
		if (model.hasCommits && !ArrayUtils.isEmpty(model.indexedBranches)) {
			Repository repository = GitBlit.self().getRepository(model.name);
			index(model, repository);
			repository.close();
			// NOTE(review): explicit gc after each repository, presumably to
			// release memory held by closed pack files - confirm still needed
			System.gc();
		}
	}
}
/**
 * Synchronously indexes a repository. This may build a complete index of a
 * repository or it may update an existing index.
 *
 * @param model
 *            the model of the repository to index
 * @param repository
 *            the repository object
 */
private void index(RepositoryModel model, Repository repository) {
	try {
		// a version mismatch in lucene.conf forces a full rebuild;
		// otherwise only new commits are indexed
		if (shouldReindex(repository)) {
			// (re)build the entire index
			IndexResult result = reindex(model, repository);

			if (result.success) {
				if (result.commitCount > 0) {
					String msg = "Built {0} Lucene index from {1} commits and {2} files across {3} branches in {4} secs";
					logger.info(MessageFormat.format(msg, model.name, result.commitCount,
							result.blobCount, result.branchCount, result.duration()));
				}
			} else {
				String msg = "Could not build {0} Lucene index!";
				logger.error(MessageFormat.format(msg, model.name));
			}
		} else {
			// update the index with latest commits
			IndexResult result = updateIndex(model, repository);
			if (result.success) {
				if (result.commitCount > 0) {
					String msg = "Updated {0} Lucene index with {1} commits and {2} files across {3} branches in {4} secs";
					logger.info(MessageFormat.format(msg, model.name, result.commitCount,
							result.blobCount, result.branchCount, result.duration()));
				}
			} else {
				String msg = "Could not update {0} Lucene index!";
				logger.error(MessageFormat.format(msg, model.name));
			}
		}
	} catch (Throwable t) {
		// catch Throwable so one failing repository cannot kill the
		// executor's scheduled indexing run
		logger.error(MessageFormat.format("Lucene indexing failure for {0}", model.name), t);
	}
}
/**
 * Closes and evicts the cached searcher and writer for a repository.
 * Failures are logged; a searcher failure does not prevent the writer
 * from being closed.
 *
 * @param repositoryName the repository whose index objects are released
 */
public synchronized void close(String repositoryName) {
	IndexSearcher searcher = searchers.remove(repositoryName);
	try {
		if (searcher != null) {
			searcher.getIndexReader().close();
		}
	} catch (Exception e) {
		logger.error("Failed to close index searcher for " + repositoryName, e);
	}

	IndexWriter writer = writers.remove(repositoryName);
	try {
		if (writer != null) {
			writer.close();
		}
	} catch (Exception e) {
		logger.error("Failed to close index writer for " + repositoryName, e);
	}
}
/**
 * Close all Lucene indexers. A failure to close one index does not
 * prevent the others from being closed.
 */
public synchronized void close() {
	// iterate entries directly instead of keySet()+get(); on a concurrent
	// map the value could be removed between the two calls and the
	// original code would then NPE on the null lookup
	for (Map.Entry<String, IndexWriter> entry : writers.entrySet()) {
		try {
			entry.getValue().close(true);
		} catch (Throwable t) {
			logger.error("Failed to close Lucene writer for " + entry.getKey(), t);
		}
	}
	writers.clear();

	for (Map.Entry<String, IndexSearcher> entry : searchers.entrySet()) {
		try {
			entry.getValue().getIndexReader().close();
		} catch (Throwable t) {
			logger.error("Failed to close Lucene searcher for " + entry.getKey(), t);
		}
	}
	searchers.clear();
}
/**
 * Deletes the Lucene index for the specified repository.
 *
 * @param repositoryName
 * @return true if the index and config were removed (or did not exist)
 * @throws RuntimeException wrapping any IOException, so that callers such
 *             as reindex() fail loudly rather than reuse a partially
 *             deleted index
 */
public boolean deleteIndex(String repositoryName) {
	try {
		// close any open writer/searcher
		close(repositoryName);

		// delete the index folder
		File repositoryFolder = new File(repositoriesFolder, repositoryName);
		File luceneIndex = new File(repositoryFolder, LUCENE_DIR);
		if (luceneIndex.exists()) {
			org.eclipse.jgit.util.FileUtils.delete(luceneIndex,
					org.eclipse.jgit.util.FileUtils.RECURSIVE);
		}

		// delete the config file
		File luceneConfig = new File(repositoryFolder, CONF_FILE);
		if (luceneConfig.exists()) {
			luceneConfig.delete();
		}
		return true;
	} catch (IOException e) {
		throw new RuntimeException(e);
	}
}
/**
 * Returns the author name for the commit, falling back to the author's
 * email address and finally to "unknown" if neither is available.
 *
 * @param commit the commit to inspect
 * @return the author name, email address, or "unknown"; never null/empty
 */
private String getAuthor(RevCommit commit) {
	try {
		String name = commit.getAuthorIdent().getName();
		if (!StringUtils.isEmpty(name)) {
			return name;
		}
		String email = commit.getAuthorIdent().getEmailAddress();
		if (!StringUtils.isEmpty(email)) {
			return email;
		}
	} catch (NullPointerException n) {
		// commit has no parseable author identity; fall through
	}
	// the original could return a null/empty email here, which later NPEs
	// when the value is stored in a Lucene Field; always return "unknown"
	return "unknown";
}
/**
 * Returns the committer name for the commit, falling back to the
 * committer's email address and finally to "unknown" if neither is
 * available.
 *
 * @param commit the commit to inspect
 * @return the committer name, email address, or "unknown"; never null/empty
 */
private String getCommitter(RevCommit commit) {
	try {
		String name = commit.getCommitterIdent().getName();
		if (!StringUtils.isEmpty(name)) {
			return name;
		}
		String email = commit.getCommitterIdent().getEmailAddress();
		if (!StringUtils.isEmpty(email)) {
			return email;
		}
	} catch (NullPointerException n) {
		// commit has no parseable committer identity; fall through
	}
	// the original could return a null/empty email here, which later NPEs
	// when the value is stored in a Lucene Field; always return "unknown"
	return "unknown";
}
/**
 * Gets the tree associated with the given commit, parsing the commit
 * headers first if the tree has not been resolved yet.
 *
 * @param walk the walk used to parse commit headers when necessary
 * @param commit the commit whose tree is wanted
 * @return the commit's tree
 * @throws IOException if the commit headers cannot be read
 */
private RevTree getTree(final RevWalk walk, final RevCommit commit)
		throws IOException {
	RevTree tree = commit.getTree();
	if (tree == null) {
		walk.parseHeaders(commit);
		tree = commit.getTree();
	}
	return tree;
}
/**
 * Construct a keyname from the branch.
 *
 * Git config keys have a restricted character set, so the branch name is
 * hashed (SHA-1) to produce a safe, fixed-length key; the human-readable
 * name is stored separately under the aliases section.
 *
 * @param branchName
 * @return a keyname appropriate for the Git config file format
 */
private String getBranchKey(String branchName) {
	return StringUtils.getSHA1(branchName);
}
/**
 * Returns the Lucene configuration for the specified repository, backed
 * by the lucene.conf file in the repository directory. The returned
 * config is not yet loaded.
 *
 * @param repository the repository
 * @return a config object
 */
private FileBasedConfig getConfig(Repository repository) {
	File confFile = new File(repository.getDirectory(), CONF_FILE);
	return new FileBasedConfig(confFile, FS.detect());
}
/**
 * Reads the Lucene config file for the repository to check the index
 * version. If the index version is different, then rebuild the repository
 * index.
 *
 * @param repository
 * @return true if the on-disk index format is different than INDEX_VERSION
 */
private boolean shouldReindex(Repository repository) {
	try {
		FileBasedConfig config = getConfig(repository);
		config.load();
		int indexVersion = config.getInt(CONF_INDEX, CONF_VERSION, 0);
		// reindex if versions do not match
		return indexVersion != INDEX_VERSION;
	} catch (Throwable t) {
		// deliberately swallowed: a missing or unreadable config file
		// simply forces a full reindex below
	}
	return true;
}
/**
 * This completely indexes the repository and will destroy any existing
 * index. For each indexed branch it walks the tip tree, attributes every
 * blob to the commit that last touched it, and indexes blobs, commits,
 * and (when a gb-issues branch exists) issues.
 *
 * @param model the model of the repository to index
 * @param repository the repository object
 * @return IndexResult with success flag and indexed-object counts
 */
public IndexResult reindex(RepositoryModel model, Repository repository) {
	IndexResult result = new IndexResult();
	if (!deleteIndex(model.name)) {
		return result;
	}
	try {
		FileBasedConfig config = getConfig(repository);
		Set<String> indexedCommits = new TreeSet<String>();
		IndexWriter writer = getIndexWriter(model.name);
		// build a quick lookup of annotated tags keyed by the name of the
		// commit they reference
		Map<String, List<String>> tags = new HashMap<String, List<String>>();
		for (RefModel tag : JGitUtils.getTags(repository, false, -1)) {
			if (!tag.isAnnotatedTag()) {
				// skip non-annotated tags
				continue;
			}
			// FIX: key consistently on the referenced object id. The
			// original tested containsKey(tag.getObjectId()) - an ObjectId
			// against String keys - which never matched, so a second tag
			// on the same commit clobbered the first tag's list.
			String referencedId = tag.getReferencedObjectId().getName();
			if (!tags.containsKey(referencedId)) {
				tags.put(referencedId, new ArrayList<String>());
			}
			tags.get(referencedId).add(tag.displayName);
		}

		ObjectReader reader = repository.newObjectReader();

		// get the local branches
		List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);

		// sort them by most recently updated
		Collections.sort(branches, new Comparator<RefModel>() {
			@Override
			public int compare(RefModel ref1, RefModel ref2) {
				return ref2.getDate().compareTo(ref1.getDate());
			}
		});

		// reorder default branch to first position
		RefModel defaultBranch = null;
		ObjectId defaultBranchId = JGitUtils.getDefaultBranch(repository);
		for (RefModel branch : branches) {
			if (branch.getObjectId().equals(defaultBranchId)) {
				defaultBranch = branch;
				break;
			}
		}
		// FIX: only reorder when a default branch was actually found; the
		// original unconditionally inserted null at position 0 and then
		// threw an NPE in the branch loop below
		if (defaultBranch != null) {
			branches.remove(defaultBranch);
			branches.add(0, defaultBranch);
		}

		// walk through each branch
		for (RefModel branch : branches) {
			// if this branch is not specifically indexed then skip
			if (!model.indexedBranches.contains(branch.getName())) {
				continue;
			}

			String branchName = branch.getName();
			RevWalk revWalk = new RevWalk(reader);
			RevCommit tip = revWalk.parseCommit(branch.getObjectId());
			String tipId = tip.getId().getName();

			// record the branch alias and tip in the Lucene config
			String keyName = getBranchKey(branchName);
			config.setString(CONF_ALIAS, null, keyName, branchName);
			config.setString(CONF_BRANCH, null, keyName, tipId);

			// collect every path in the tip tree
			TreeWalk treeWalk = new TreeWalk(repository);
			treeWalk.addTree(tip.getTree());
			treeWalk.setRecursive(true);

			Map<String, ObjectId> paths = new TreeMap<String, ObjectId>();
			while (treeWalk.next()) {
				paths.put(treeWalk.getPathString(), treeWalk.getObjectId(0));
			}

			ByteArrayOutputStream os = new ByteArrayOutputStream();
			byte[] tmp = new byte[32767];

			RevWalk commitWalk = new RevWalk(reader);
			commitWalk.markStart(tip);

			// walk history, diffing each commit against its parent, so that
			// every remaining path is attributed to the commit that most
			// recently touched it
			RevCommit commit;
			while ((paths.size() > 0) && (commit = commitWalk.next()) != null) {
				TreeWalk diffWalk = new TreeWalk(reader);
				int parentCount = commit.getParentCount();
				switch (parentCount) {
				case 0:
					diffWalk.addTree(new EmptyTreeIterator());
					break;
				case 1:
					diffWalk.addTree(getTree(commitWalk, commit.getParent(0)));
					break;
				default:
					// skip merge commits
					continue;
				}
				diffWalk.addTree(getTree(commitWalk, commit));
				diffWalk.setFilter(ANY_DIFF);
				diffWalk.setRecursive(true);
				while ((paths.size() > 0) && diffWalk.next()) {
					String path = diffWalk.getPathString();
					if (!paths.containsKey(path)) {
						continue;
					}

					// remove path from set
					ObjectId blobId = paths.remove(path);
					result.blobCount++;

					// index the blob metadata
					String blobAuthor = getAuthor(commit);
					String blobCommitter = getCommitter(commit);
					String blobDate = DateTools.timeToString(commit.getCommitTime() * 1000L,
							Resolution.MINUTE);

					Document doc = new Document();
					doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES, Index.NOT_ANALYZED_NO_NORMS));
					doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
					doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
					doc.add(new Field(FIELD_PATH, path, Store.YES, Index.ANALYZED));
					doc.add(new Field(FIELD_DATE, blobDate, Store.YES, Index.NO));
					doc.add(new Field(FIELD_AUTHOR, blobAuthor, Store.YES, Index.ANALYZED));
					doc.add(new Field(FIELD_COMMITTER, blobCommitter, Store.YES, Index.ANALYZED));

					// determine extension to compare to the extension blacklist
					String ext = null;
					String name = path.toLowerCase();
					if (name.indexOf('.') > -1) {
						ext = name.substring(name.lastIndexOf('.') + 1);
					}

					// index the blob content unless the extension is excluded
					if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) {
						ObjectLoader ldr = repository.open(blobId, Constants.OBJ_BLOB);
						InputStream in = ldr.openStream();
						int n;
						while ((n = in.read(tmp)) > 0) {
							os.write(tmp, 0, n);
						}
						in.close();
						byte[] content = os.toByteArray();
						String str = new String(content, Constants.CHARACTER_ENCODING);
						doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
						os.reset();
					}

					// add the blob to the index
					writer.addDocument(doc);
				}
			}

			os.close();

			// index the tip commit object
			if (indexedCommits.add(tipId)) {
				Document doc = createDocument(tip, tags.get(tipId));
				doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
				writer.addDocument(doc);
				result.commitCount += 1;
				result.branchCount += 1;
			}

			// traverse the log and index the previous commit objects
			RevWalk historyWalk = new RevWalk(reader);
			historyWalk.markStart(historyWalk.parseCommit(tip.getId()));
			RevCommit rev;
			while ((rev = historyWalk.next()) != null) {
				String hash = rev.getId().getName();
				if (indexedCommits.add(hash)) {
					Document doc = createDocument(rev, tags.get(hash));
					doc.add(new Field(FIELD_BRANCH, branchName, Store.YES, Index.ANALYZED));
					writer.addDocument(doc);
					result.commitCount += 1;
				}
			}
		}

		// finished
		reader.release();

		// this repository has a gb-issues branch, index all issues
		if (IssueUtils.getIssuesBranch(repository) != null) {
			List<IssueModel> issues = IssueUtils.getIssues(repository, null);
			if (issues.size() > 0) {
				result.branchCount += 1;
			}
			for (IssueModel issue : issues) {
				result.issueCount++;
				Document doc = createDocument(issue);
				writer.addDocument(doc);
			}
		}

		// commit all changes and reset the searcher
		config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION);
		config.save();
		writer.commit();
		resetIndexSearcher(model.name);
		result.success();
	} catch (Exception e) {
		logger.error("Exception while reindexing " + model.name, e);
	}
	return result;
}
/**
 * Incrementally update the index with the specified commit for the
 * repository: every path touched by the commit is re-indexed (or removed
 * if deleted) and the commit itself is indexed.
 *
 * @param repositoryName
 * @param repository
 * @param branch
 *            the fully qualified branch name (e.g. refs/heads/master)
 * @param commit
 * @return an IndexResult with success flag and counts of indexed objects
 */
private IndexResult index(String repositoryName, Repository repository,
		String branch, RevCommit commit) {
	IndexResult result = new IndexResult();
	try {
		List<PathChangeModel> changedPaths = JGitUtils.getFilesInCommit(repository, commit);
		String revDate = DateTools.timeToString(commit.getCommitTime() * 1000L,
				Resolution.MINUTE);
		IndexWriter writer = getIndexWriter(repositoryName);
		for (PathChangeModel path : changedPaths) {
			// delete the indexed blob
			deleteBlob(repositoryName, branch, path.name);

			// re-index the blob
			if (!ChangeType.DELETE.equals(path.changeType)) {
				result.blobCount++;
				Document doc = new Document();
				doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.blob.name(), Store.YES,
						Index.NOT_ANALYZED));
				doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
				doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
				doc.add(new Field(FIELD_PATH, path.path, Store.YES, Index.ANALYZED));
				doc.add(new Field(FIELD_DATE, revDate, Store.YES, Index.NO));
				doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
				doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));

				// determine extension to compare to the extension blacklist
				String ext = null;
				String name = path.name.toLowerCase();
				if (name.indexOf('.') > -1) {
					ext = name.substring(name.lastIndexOf('.') + 1);
				}

				if (StringUtils.isEmpty(ext) || !excludedExtensions.contains(ext)) {
					// read the blob content
					String str = JGitUtils.getStringContent(repository, commit.getTree(),
							path.path);
					doc.add(new Field(FIELD_CONTENT, str, Store.YES, Index.ANALYZED));
					writer.addDocument(doc);
				}
			}
		}
		writer.commit();

		// get any annotated commit tags
		List<String> commitTags = new ArrayList<String>();
		for (RefModel ref : JGitUtils.getTags(repository, true, -1)) {
			if (ref.isAnnotatedTag() && ref.getReferencedObjectId().equals(commit.getId())) {
				commitTags.add(ref.displayName);
			}
		}

		// create and write the Lucene document for the commit itself
		Document doc = createDocument(commit, commitTags);
		doc.add(new Field(FIELD_BRANCH, branch, Store.YES, Index.ANALYZED));
		result.commitCount++;
		result.success = index(repositoryName, doc);
	} catch (Exception e) {
		logger.error(MessageFormat.format("Exception while indexing commit {0} in {1}", commit.getId().getName(), repositoryName), e);
	}
	return result;
}
/**
* Incrementally update the index with the specified issue for the
* repository.
*
* @param repositoryName
* @param issue
* @return true, if successful
*/
public boolean index(String repositoryName, IssueModel issue) {
try {
// delete the old issue from the index, if exists
deleteIssue(repositoryName, issue.id);
Document doc = createDocument(issue);
return index(repositoryName, doc);
} catch (Exception e) {
logger.error(MessageFormat.format("Error while indexing issue {0} in {1}", issue.id, repositoryName), e);
}
return false;
}
/**
* Delete an issue from the repository index.
*
* @param repositoryName
* @param issueId
* @throws Exception
*/
private void deleteIssue(String repositoryName, String issueId) throws Exception {
BooleanQuery query = new BooleanQuery();
Term objectTerm = new Term(FIELD_OBJECT_TYPE, SearchObjectType.issue.name());
query.add(new TermQuery(objectTerm), Occur.MUST);
Term issueidTerm = new Term(FIELD_ISSUE, issueId);
query.add(new TermQuery(issueidTerm), Occur.MUST);
IndexWriter writer = getIndexWriter(repositoryName);
writer.deleteDocuments(query);
writer.commit();
}
/**
* Delete a blob from the specified branch of the repository index.
*
* @param repositoryName
* @param branch
* @param path
* @throws Exception
*/
private void deleteBlob(String repositoryName, String branch, String path) throws Exception {
BooleanQuery query = new BooleanQuery();
Term objectTerm = new Term(FIELD_OBJECT_TYPE, SearchObjectType.blob.name());
query.add(new TermQuery(objectTerm), Occur.MUST);
Term branchTerm = new Term(FIELD_BRANCH, branch);
query.add(new TermQuery(branchTerm), Occur.MUST);
Term pathTerm = new Term(FIELD_PATH, path);
query.add(new TermQuery(pathTerm), Occur.MUST);
IndexWriter writer = getIndexWriter(repositoryName);
writer.deleteDocuments(query);
writer.commit();
}
/**
* Updates a repository index incrementally from the last indexed commits.
*
* @param model
* @param repository
* @return IndexResult
*/
private IndexResult updateIndex(RepositoryModel model, Repository repository) {
IndexResult result = new IndexResult();
try {
FileBasedConfig config = getConfig(repository);
config.load();
// build a quick lookup of annotated tags
Map<String, List<String>> tags = new HashMap<String, List<String>>();
for (RefModel tag : JGitUtils.getTags(repository, false, -1)) {
if (!tag.isAnnotatedTag()) {
// skip non-annotated tags
continue;
}
if (!tags.containsKey(tag.getObjectId())) {
tags.put(tag.getReferencedObjectId().getName(), new ArrayList<String>());
}
tags.get(tag.getReferencedObjectId().getName()).add(tag.displayName);
}
// detect branch deletion
// first assume all branches are deleted and then remove each
// existing branch from deletedBranches during indexing
Set<String> deletedBranches = new TreeSet<String>();
for (String alias : config.getNames(CONF_ALIAS)) {
String branch = config.getString(CONF_ALIAS, null, alias);
deletedBranches.add(branch);
}
// walk through each branches
List<RefModel> branches = JGitUtils.getLocalBranches(repository, true, -1);
for (RefModel branch : branches) {
String branchName = branch.getName();
// determine if we should skip this branch
if (!IssueUtils.GB_ISSUES.equals(branch)
&& !model.indexedBranches.contains(branch.getName())) {
continue;
}
// remove this branch from the deletedBranches set
deletedBranches.remove(branchName);
// determine last commit
String keyName = getBranchKey(branchName);
String lastCommit = config.getString(CONF_BRANCH, null, keyName);
List<RevCommit> revs;
if (StringUtils.isEmpty(lastCommit)) {
// new branch/unindexed branch, get all commits on branch
revs = JGitUtils.getRevLog(repository, branchName, 0, -1);
} else {
// pre-existing branch, get changes since last commit
revs = JGitUtils.getRevLog(repository, lastCommit, branchName);
}
if (revs.size() > 0) {
result.branchCount += 1;
}
// track the issue ids that we have already indexed
Set<String> indexedIssues = new TreeSet<String>();
// reverse the list of commits so we start with the first commit
Collections.reverse(revs);
for (RevCommit commit : revs) {
if (IssueUtils.GB_ISSUES.equals(branch)) {
// only index an issue once during updateIndex
String issueId = commit.getShortMessage().substring(2).trim();
if (indexedIssues.contains(issueId)) {
continue;
}
indexedIssues.add(issueId);
IssueModel issue = IssueUtils.getIssue(repository, issueId);
if (issue == null) {
// issue was deleted, remove from index
deleteIssue(model.name, issueId);
} else {
// issue was updated
index(model.name, issue);
result.issueCount++;
}
} else {
// index a commit
result.add(index(model.name, repository, branchName, commit));
}
}
// update the config
config.setInt(CONF_INDEX, null, CONF_VERSION, INDEX_VERSION);
config.setString(CONF_ALIAS, null, keyName, branchName);
config.setString(CONF_BRANCH, null, keyName, branch.getObjectId().getName());
config.save();
}
// the deletedBranches set will normally be empty by this point
// unless a branch really was deleted and no longer exists
if (deletedBranches.size() > 0) {
for (String branch : deletedBranches) {
IndexWriter writer = getIndexWriter(model.name);
writer.deleteDocuments(new Term(FIELD_BRANCH, branch));
writer.commit();
}
}
result.success = true;
} catch (Throwable t) {
logger.error(MessageFormat.format("Exception while updating {0} Lucene index", model.name), t);
}
return result;
}
/**
* Creates a Lucene document from an issue.
*
* @param issue
* @return a Lucene document
*/
private Document createDocument(IssueModel issue) {
Document doc = new Document();
doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.issue.name(), Store.YES,
Field.Index.NOT_ANALYZED));
doc.add(new Field(FIELD_ISSUE, issue.id, Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_BRANCH, IssueUtils.GB_ISSUES, Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_DATE, DateTools.dateToString(issue.created, Resolution.MINUTE),
Store.YES, Field.Index.NO));
doc.add(new Field(FIELD_AUTHOR, issue.reporter, Store.YES, Index.ANALYZED));
List<String> attachments = new ArrayList<String>();
for (Attachment attachment : issue.getAttachments()) {
attachments.add(attachment.name.toLowerCase());
}
doc.add(new Field(FIELD_ATTACHMENT, StringUtils.flattenStrings(attachments), Store.YES,
Index.ANALYZED));
doc.add(new Field(FIELD_SUMMARY, issue.summary, Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_CONTENT, issue.toString(), Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_LABEL, StringUtils.flattenStrings(issue.getLabels()), Store.YES,
Index.ANALYZED));
return doc;
}
/**
* Creates a Lucene document for a commit
*
* @param commit
* @param tags
* @return a Lucene document
*/
private Document createDocument(RevCommit commit, List<String> tags) {
Document doc = new Document();
doc.add(new Field(FIELD_OBJECT_TYPE, SearchObjectType.commit.name(), Store.YES,
Index.NOT_ANALYZED));
doc.add(new Field(FIELD_COMMIT, commit.getName(), Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_DATE, DateTools.timeToString(commit.getCommitTime() * 1000L,
Resolution.MINUTE), Store.YES, Index.NO));
doc.add(new Field(FIELD_AUTHOR, getAuthor(commit), Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_COMMITTER, getCommitter(commit), Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_SUMMARY, commit.getShortMessage(), Store.YES, Index.ANALYZED));
doc.add(new Field(FIELD_CONTENT, commit.getFullMessage(), Store.YES, Index.ANALYZED));
if (!ArrayUtils.isEmpty(tags)) {
doc.add(new Field(FIELD_TAG, StringUtils.flattenStrings(tags), Store.YES, Index.ANALYZED));
}
return doc;
}
/**
* Incrementally index an object for the repository.
*
* @param repositoryName
* @param doc
* @return true, if successful
*/
private boolean index(String repositoryName, Document doc) {
try {
IndexWriter writer = getIndexWriter(repositoryName);
writer.addDocument(doc);
writer.commit();
resetIndexSearcher(repositoryName);
return true;
} catch (Exception e) {
logger.error(MessageFormat.format("Exception while incrementally updating {0} Lucene index", repositoryName), e);
}
return false;
}
private SearchResult createSearchResult(Document doc, float score, int hitId, int totalHits) throws ParseException {
SearchResult result = new SearchResult();
result.hitId = hitId;
result.totalHits = totalHits;
result.score = score;
result.date = DateTools.stringToDate(doc.get(FIELD_DATE));
result.summary = doc.get(FIELD_SUMMARY);
result.author = doc.get(FIELD_AUTHOR);
result.committer = doc.get(FIELD_COMMITTER);
result.type = SearchObjectType.fromName(doc.get(FIELD_OBJECT_TYPE));
result.branch = doc.get(FIELD_BRANCH);
result.commitId = doc.get(FIELD_COMMIT);
result.issueId = doc.get(FIELD_ISSUE);
result.path = doc.get(FIELD_PATH);
if (doc.get(FIELD_TAG) != null) {
result.tags = StringUtils.getStringsFromValue(doc.get(FIELD_TAG));
}
if (doc.get(FIELD_LABEL) != null) {
result.labels = StringUtils.getStringsFromValue(doc.get(FIELD_LABEL));
}
return result;
}
private synchronized void resetIndexSearcher(String repository) throws IOException {
IndexSearcher searcher = searchers.remove(repository);
if (searcher != null) {
searcher.getIndexReader().close();
}
}
/**
* Gets an index searcher for the repository.
*
* @param repository
* @return
* @throws IOException
*/
private IndexSearcher getIndexSearcher(String repository) throws IOException {
IndexSearcher searcher = searchers.get(repository);
if (searcher == null) {
IndexWriter writer = getIndexWriter(repository);
searcher = new IndexSearcher(IndexReader.open(writer, true));
searchers.put(repository, searcher);
}
return searcher;
}
/**
* Gets an index writer for the repository. The index will be created if it
* does not already exist or if forceCreate is specified.
*
* @param repository
* @return an IndexWriter
* @throws IOException
*/
private IndexWriter getIndexWriter(String repository) throws IOException {
IndexWriter indexWriter = writers.get(repository);
File repositoryFolder = new File(repositoriesFolder, repository);
File indexFolder = new File(repositoryFolder, LUCENE_DIR);
Directory directory = FSDirectory.open(indexFolder);
if (indexWriter == null) {
if (!indexFolder.exists()) {
indexFolder.mkdirs();
}
StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
IndexWriterConfig config = new IndexWriterConfig(LUCENE_VERSION, analyzer);
config.setOpenMode(OpenMode.CREATE_OR_APPEND);
indexWriter = new IndexWriter(directory, config);
writers.put(repository, indexWriter);
}
return indexWriter;
}
/**
* Searches the specified repositories for the given text or query
*
* @param text
* if the text is null or empty, null is returned
* @param page
* the page number to retrieve. page is 1-indexed.
* @param pageSize
* the number of elements to return for this page
* @param repositories
* a list of repositories to search. if no repositories are
* specified null is returned.
* @return a list of SearchResults in order from highest to the lowest score
*
*/
public List<SearchResult> search(String text, int page, int pageSize, List<String> repositories) {
if (ArrayUtils.isEmpty(repositories)) {
return null;
}
return search(text, page, pageSize, repositories.toArray(new String[0]));
}
/**
* Searches the specified repositories for the given text or query
*
* @param text
* if the text is null or empty, null is returned
* @param page
* the page number to retrieve. page is 1-indexed.
* @param pageSize
* the number of elements to return for this page
* @param repositories
* a list of repositories to search. if no repositories are
* specified null is returned.
* @return a list of SearchResults in order from highest to the lowest score
*
*/
	public List<SearchResult> search(String text, int page, int pageSize, String... repositories) {
		if (StringUtils.isEmpty(text)) {
			return null;
		}
		if (ArrayUtils.isEmpty(repositories)) {
			return null;
		}
		// LinkedHashSet preserves score order while de-duplicating results
		Set<SearchResult> results = new LinkedHashSet<SearchResult>();
		StandardAnalyzer analyzer = new StandardAnalyzer(LUCENE_VERSION);
		try {
			// default search checks summary and content
			BooleanQuery query = new BooleanQuery();
			QueryParser qp;

			qp = new QueryParser(LUCENE_VERSION, FIELD_SUMMARY, analyzer);
			qp.setAllowLeadingWildcard(true);
			query.add(qp.parse(text), Occur.SHOULD);

			qp = new QueryParser(LUCENE_VERSION, FIELD_CONTENT, analyzer);
			qp.setAllowLeadingWildcard(true);
			query.add(qp.parse(text), Occur.SHOULD);

			IndexSearcher searcher;
			if (repositories.length == 1) {
				// single repository search
				searcher = getIndexSearcher(repositories[0]);
			} else {
				// multiple repository search: wrap the per-repository readers in
				// a MultiSourceReader so hits can be mapped back to their source
				List<IndexReader> readers = new ArrayList<IndexReader>();
				for (String repository : repositories) {
					IndexSearcher repositoryIndex = getIndexSearcher(repository);
					readers.add(repositoryIndex.getIndexReader());
				}
				IndexReader[] rdrs = readers.toArray(new IndexReader[readers.size()]);
				MultiSourceReader reader = new MultiSourceReader(rdrs);
				searcher = new IndexSearcher(reader);
			}

			Query rewrittenQuery = searcher.rewrite(query);

			TopScoreDocCollector collector = TopScoreDocCollector.create(5000, true);
			searcher.search(rewrittenQuery, collector);
			// page is 1-indexed; clamp to 0 for bad input
			int offset = Math.max(0, (page - 1) * pageSize);
			ScoreDoc[] hits = collector.topDocs(offset, pageSize).scoreDocs;
			int totalHits = collector.getTotalHits();
			for (int i = 0; i < hits.length; i++) {
				int docId = hits[i].doc;
				Document doc = searcher.doc(docId);
				// hitId is the 1-indexed absolute rank of the hit
				SearchResult result = createSearchResult(doc, hits[i].score, offset + i + 1, totalHits);
				if (repositories.length == 1) {
					// single repository search
					result.repository = repositories[0];
				} else {
					// multi-repository search: map the docId back to its source index
					MultiSourceReader reader = (MultiSourceReader) searcher.getIndexReader();
					int index = reader.getSourceIndex(docId);
					result.repository = repositories[index];
				}
				String content = doc.get(FIELD_CONTENT);
				result.fragment = getHighlightedFragment(analyzer, query, content, result);
				results.add(result);
			}
		} catch (Exception e) {
			// NOTE(review): errors are swallowed and a partial/empty list returned
			logger.error(MessageFormat.format("Exception while searching for {0}", text), e);
		}
		return new ArrayList<SearchResult>(results);
	}
/**
*
* @param analyzer
* @param query
* @param content
* @param result
* @return
* @throws IOException
* @throws InvalidTokenOffsetsException
*/
	private String getHighlightedFragment(Analyzer analyzer, Query query,
			String content, SearchResult result) throws IOException, InvalidTokenOffsetsException {
		if (content == null) {
			content = "";
		}
		// commits get longer fragments than blobs/issues
		int fragmentLength = SearchObjectType.commit == result.type ? 512 : 150;

		QueryScorer scorer = new QueryScorer(query, "content");
		Fragmenter fragmenter = new SimpleSpanFragmenter(scorer, fragmentLength);

		// use an artificial delimiter for the token
		String termTag = "!!--[";
		String termTagEnd = "]--!!";
		SimpleHTMLFormatter formatter = new SimpleHTMLFormatter(termTag, termTagEnd);
		Highlighter highlighter = new Highlighter(formatter, scorer);
		highlighter.setTextFragmenter(fragmenter);

		String [] fragments = highlighter.getBestFragments(analyzer, "content", content, 3);
		if (ArrayUtils.isEmpty(fragments)) {
			if (SearchObjectType.blob == result.type) {
				return "";
			}
			// clip commit message
			String fragment = content;
			if (fragment.length() > fragmentLength) {
				fragment = fragment.substring(0, fragmentLength) + "...";
			}
			return "<pre class=\"text\">" + StringUtils.escapeForHtml(fragment, true) + "</pre>";
		}

		// contentPos tracks how far into content we have matched fragments,
		// so repeated fragments resolve to successive positions
		int contentPos = 0;
		StringBuilder sb = new StringBuilder();
		for (int i = 0, len = fragments.length; i < len; i++) {
			String fragment = fragments[i];
			String tag = "<pre class=\"text\">";

			// resurrect the raw fragment from removing the artificial delimiters
			String raw = fragment.replace(termTag, "").replace(termTagEnd, "");

			// determine position of the raw fragment in the content
			int pos = content.indexOf(raw, contentPos);

			// restore complete first line of fragment
			int c = pos;
			while (c > 0) {
				c--;
				if (content.charAt(c) == '\n') {
					break;
				}
			}
			if (c > 0) {
				// inject leading chunk of first fragment line
				fragment = content.substring(c + 1, pos) + fragment;
			}

			if (SearchObjectType.blob == result.type) {
				// count lines as offset into the content for this fragment
				int line = Math.max(1, StringUtils.countLines(content.substring(0, pos)));

				// create fragment tag with line number and language
				String lang = "";
				String ext = StringUtils.getFileExtension(result.path).toLowerCase();
				if (!StringUtils.isEmpty(ext)) {
					// maintain leading space!
					lang = " lang-" + ext;
				}
				tag = MessageFormat.format("<pre class=\"prettyprint linenums:{0,number,0}{1}\">", line, lang);

				// update offset into content
				contentPos = pos + raw.length() + 1;
			}
			sb.append(tag);

			// replace the artificial delimiter with html tags
			String html = StringUtils.escapeForHtml(fragment, false);
			html = html.replace(termTag, "<span class=\"highlight\">").replace(termTagEnd, "</span>");
			sb.append(html);
			sb.append("</pre>");
			if (i < len - 1) {
				sb.append("<span class=\"ellipses\">...</span><br/>");
			}
		}
		return sb.toString();
	}
/**
* Simple class to track the results of an index update.
*/
	private class IndexResult {
		// wall-clock bounds of the index operation, in millis
		long startTime = System.currentTimeMillis();
		long endTime = startTime;
		// true only when the whole operation completed without error
		boolean success;
		// counts of the objects processed during this index run
		int branchCount;
		int commitCount;
		int blobCount;
		int issueCount;

		// accumulate counts from a nested/partial result
		void add(IndexResult result) {
			this.branchCount += result.branchCount;
			this.commitCount += result.commitCount;
			this.blobCount += result.blobCount;
			this.issueCount += result.issueCount;
		}

		// mark the operation complete and record the end time
		void success() {
			success = true;
			endTime = System.currentTimeMillis();
		}

		// elapsed time in seconds
		float duration() {
			return (endTime - startTime)/1000f;
		}
	}
/**
* Custom subclass of MultiReader to identify the source index for a given
* doc id. This would not be necessary of there was a public method to
* obtain this information.
*
*/
	private class MultiSourceReader extends MultiReader {

		// cached handle to MultiReader's private readerIndex(int) method,
		// obtained reflectively because Lucene does not expose it publicly
		final Method method;

		MultiSourceReader(IndexReader[] subReaders) {
			super(subReaders);
			Method m = null;
			try {
				m = MultiReader.class.getDeclaredMethod("readerIndex", int.class);
				m.setAccessible(true);
			} catch (Exception e) {
				logger.error("Error getting readerIndex method", e);
			}
			method = m;
		}

		// maps a composite docId back to the index of the sub-reader it came
		// from; returns -1 if the reflective lookup failed
		int getSourceIndex(int docId) {
			int index = -1;
			try {
				Object o = method.invoke(this, docId);
				index = (Integer) o;
			} catch (Exception e) {
				logger.error("Error getting source index", e);
			}
			return index;
		}
	}
}
|
Fixed null pointer for LuceneExecutor unit tests
|
src/com/gitblit/LuceneExecutor.java
|
Fixed null pointer for LuceneExecutor unit tests
|
|
Java
|
apache-2.0
|
7bfcf11d91af80a6d69dac0377891cf4da73ebb6
| 0
|
cloud-software-foundation/c5,cloud-software-foundation/c5,cloud-software-foundation/c5,cloud-software-foundation/c5,cloud-software-foundation/c5
|
/*
* Copyright (C) 2014 Ohm Data
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package c5db.replication;
import c5db.interfaces.ReplicationModule;
import c5db.log.ReplicatorLog;
import c5db.replication.generated.AppendEntries;
import c5db.replication.generated.AppendEntriesReply;
import c5db.replication.generated.LogEntry;
import c5db.replication.generated.RequestVote;
import c5db.replication.generated.RequestVoteReply;
import c5db.replication.rpc.RpcReply;
import c5db.replication.rpc.RpcRequest;
import c5db.replication.rpc.RpcWireReply;
import c5db.replication.rpc.RpcWireRequest;
import c5db.util.FiberOnly;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import org.jetlang.channels.AsyncRequest;
import org.jetlang.channels.Channel;
import org.jetlang.channels.MemoryChannel;
import org.jetlang.channels.MemoryRequestChannel;
import org.jetlang.channels.Request;
import org.jetlang.channels.RequestChannel;
import org.jetlang.core.Disposable;
import org.jetlang.fibers.Fiber;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import static c5db.interfaces.ReplicationModule.ReplicatorInstanceEvent;
/**
* Single instantiation of a replicator / log / lease. This implementation's logic is based on the
* RAFT algorithm (see <a href="http://raftconsensus.github.io/">http://raftconsensus.github.io/</a>.
* <p/>
* A ReplicatorInstance handles the consensus and replication for a single quorum, and communicates
* with the log package via {@link c5db.log.ReplicatorLog}.
*/
public class ReplicatorInstance implements ReplicationModule.Replicator {
private static final Logger LOG = LoggerFactory.getLogger(ReplicatorInstance.class);
@Override
public String toString() {
return "ReplicatorInstance{" +
"myId=" + myId +
", quorumId='" + quorumId + '\'' +
", lastCommittedIndex=" + lastCommittedIndex +
", myState=" + myState +
", currentTerm=" + currentTerm +
", votedFor=" + votedFor +
", lastRPC=" + lastRPC +
'}';
}
private final MemoryChannel<State> stateMemoryChannel = new MemoryChannel<>();
  // Exposes the channel on which this replicator receives RPCs; the owner
  // wires inbound wire traffic to this channel.
  public RequestChannel<RpcWireRequest, RpcReply> getIncomingChannel() {
    return incomingChannel;
  }
private final RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel;
private final RequestChannel<RpcWireRequest, RpcReply> incomingChannel = new MemoryRequestChannel<>();
private final Channel<ReplicatorInstanceEvent> stateChangeChannel;
private final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel;
/**
* ******* final fields ************
*/
private final Fiber fiber;
private final long myId;
private final String quorumId;
private final ImmutableList<Long> peers;
/**
* *** These next few fields are used when we are a leader ******
*/
// this is the next index from our log we need to send to each peer, kept track of on a per-peer basis.
private HashMap<Long, Long> peersNextIndex;
// The last succesfully acked message from our peers. I also keep track of my own acked log messages in here.
private HashMap<Long, Long> peersLastAckedIndex;
private long myFirstIndexAsLeader;
private long lastCommittedIndex;
  // Identifier of the quorum this replicator instance participates in.
  @Override
  public String getQuorumId() {
    return quorumId;
  }
  // Internal queue entry pairing a logData() payload with the future that is
  // completed once the entry has been assigned a log number.
  private static class IntLogRequest {
    public final List<ByteBuffer> data;
    // NOTE(review): field name is misspelled ("Notifation") but it is read by
    // logData(), so renaming would require touching its callers too.
    public final SettableFuture<Long> logNumberNotifation;

    private IntLogRequest(List<ByteBuffer> data) {
      this.data = data;
      this.logNumberNotifation = SettableFuture.create();
    }
  }
private final BlockingQueue<IntLogRequest> logRequests = new ArrayBlockingQueue<>(100);
State myState = State.FOLLOWER;
// In theory these are persistent:
long currentTerm;
long votedFor;
// Election timers, etc.
private long lastRPC;
private long myElectionTimeout;
private long whosLeader = 0;
private Disposable electionChecker;
private final ReplicatorLog log;
final ReplicatorInformationInterface info;
final ReplicatorInfoPersistence persister;
  /**
   * Production constructor.  Persistent state (current term, votedFor) is read
   * asynchronously on the fiber; a QUORUM_START event is published once that
   * succeeds, or the instance is failed if the read throws.
   */
  public ReplicatorInstance(final Fiber fiber,
                            final long myId,
                            final String quorumId,
                            List<Long> peers,
                            ReplicatorLog log,
                            ReplicatorInformationInterface info,
                            ReplicatorInfoPersistence persister,
                            RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel,
                            final Channel<ReplicatorInstanceEvent> stateChangeChannel,
                            final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel) {
    this.fiber = fiber;
    this.myId = myId;
    this.quorumId = quorumId;
    this.peers = ImmutableList.copyOf(peers);
    this.sendRpcChannel = sendRpcChannel;
    this.log = log;
    this.info = info;
    this.persister = persister;
    this.stateChangeChannel = stateChangeChannel;
    this.commitNoticeChannel = commitNoticeChannel;
    Random r = new Random();
    // randomized election timeout in [electionTimeout, 2*electionTimeout)
    // so peers do not all time out simultaneously
    this.myElectionTimeout = r.nextInt((int) info.electionTimeout()) + info.electionTimeout();
    this.lastRPC = info.currentTimeMillis();
    this.lastCommittedIndex = 0;

    assert this.peers.contains(this.myId);

    fiber.execute(() -> {
      try {
        readPersistentData();
        // indicate we are running!
        stateChangeChannel.publish(
            new ReplicatorInstanceEvent(
                ReplicatorInstanceEvent.EventType.QUORUM_START,
                ReplicatorInstance.this,
                0,
                info.currentTimeMillis(),
                null)
        );
      } catch (IOException e) {
        LOG.error("{} {} error during persistent data init {}", quorumId, myId, e);
        failReplicatorInstance(e);
      }
    });

    incomingChannel.subscribe(fiber, this::onIncomingMessage);
    // periodic election-timeout check
    electionChecker = fiber.scheduleWithFixedDelay(this::checkOnElection, info.electionCheckRate(),
        info.electionCheckRate(), TimeUnit.MILLISECONDS);

    LOG.debug("{} primed {}", myId, this.quorumId);
  }
/**
* Initialize object into the specified state, for testing purposes
*/
  ReplicatorInstance(final Fiber fiber,
                     final long myId,
                     final String quorumId,
                     List<Long> peers,
                     ReplicatorLog log,
                     ReplicatorInformationInterface info,
                     ReplicatorInfoPersistence persister,
                     RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel,
                     final Channel<ReplicatorInstanceEvent> stateChangeChannel,
                     final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel,
                     long term,
                     State state,
                     long lastCommittedIndex,
                     long leaderId,
                     long votedFor) {

    this.fiber = fiber;
    this.myId = myId;
    this.quorumId = quorumId;
    this.peers = ImmutableList.copyOf(peers);
    this.sendRpcChannel = sendRpcChannel;
    this.log = log;
    this.info = info;
    this.persister = persister;
    this.stateChangeChannel = stateChangeChannel;
    this.commitNoticeChannel = commitNoticeChannel;
    // fixed (non-randomized) election timeout for deterministic tests
    this.myElectionTimeout = info.electionTimeout();
    this.lastRPC = info.currentTimeMillis();

    assert this.peers.contains(this.myId);
    // 0 means "no vote" / "no leader"; otherwise must be a known peer
    assert votedFor == 0 || this.peers.contains(votedFor);
    assert leaderId == 0 || this.peers.contains(leaderId);

    incomingChannel.subscribe(fiber, this::onIncomingMessage);
    electionChecker = fiber.scheduleWithFixedDelay(this::checkOnElection,
        info.electionCheckRate(), info.electionCheckRate(), TimeUnit.MILLISECONDS);
    LOG.debug("{} primed {}", myId, this.quorumId);

    // seed the in-memory state directly instead of reading it back
    this.currentTerm = term;
    this.myState = state;
    this.lastCommittedIndex = lastCommittedIndex;
    this.whosLeader = leaderId;
    this.votedFor = votedFor;

    try {
      // persist the injected term/vote so a later read is consistent
      persister.writeCurrentTermAndVotedFor(quorumId, currentTerm, votedFor);
    } catch (IOException e) {
      failReplicatorInstance(e);
    }

    if (state == State.LEADER) {
      becomeLeader();
    }
  }
void failReplicatorInstance(Throwable e) {
stateChangeChannel.publish(
new ReplicatorInstanceEvent(
ReplicatorInstanceEvent.EventType.QUORUM_FAILURE,
this,
0,
info.currentTimeMillis(),
e)
);
fiber.dispose(); // kill us forever.
}
// public API:
  // public API:

  @Override
  public ListenableFuture<Long> logData(List<ByteBuffer> data) throws InterruptedException {
    if (!isLeader()) {
      LOG.debug("{} attempted to logData on a non-leader", myId);
      // NOTE(review): returning null from a future-returning API forces null
      // checks on every caller; consider an immediate failed future instead.
      return null;
    }

    IntLogRequest req = new IntLogRequest(data);
    // blocks if the bounded request queue is full (hence InterruptedException)
    logRequests.put(req);

    // TODO return the durable notification future?
    return req.logNumberNotifation;
  }
  // Loads the RAFT persistent state (currentTerm, votedFor) for this quorum.
  @FiberOnly
  private void readPersistentData() throws IOException {
    currentTerm = persister.readCurrentTerm(quorumId);
    votedFor = persister.readVotedFor(quorumId);
  }
@FiberOnly
private void onIncomingMessage(Request<RpcWireRequest, RpcReply> message) {
RpcWireRequest req = message.getRequest();
if (req.isRequestVoteMessage()) {
doRequestVote(message);
} else if (req.isAppendMessage()) {
doAppendMessage(message);
} else {
LOG.warn("{} Got a message of protobuf type I dont know: {}", myId, req);
}
}
@FiberOnly
private void doRequestVote(Request<RpcWireRequest, RpcReply> message) {
RequestVote msg = message.getRequest().getRequestVoteMessage();
// 1. Return if term < currentTerm (sec 5.1)
if (msg.getTerm() < currentTerm) {
RequestVoteReply m = new RequestVoteReply(currentTerm, false);
RpcReply reply = new RpcReply(m);
message.reply(reply);
return;
}
// 2. if term > currentTerm, currentTerm <- term
if (msg.getTerm() > currentTerm) {
LOG.debug("{} requestVote rpc, pushing forward currentTerm {} to {}", myId, currentTerm, msg.getTerm());
setCurrentTerm(msg.getTerm());
// 2a. Step down if candidate or leader.
if (myState != State.FOLLOWER) {
LOG.debug("{} stepping down to follower, currentTerm: {}", myId, currentTerm);
haltLeader();
}
}
// 3. if votedFor is null (0), or candidateId, and candidate's log
// is at least as complete as local log (sec 5.2, 5.4), grant vote
// and reset election timeout.
boolean vote = false;
if ((log.getLastTerm() <= msg.getLastLogTerm())
&&
log.getLastIndex() <= msg.getLastLogIndex()) {
// we can vote for this because the candidate's log is at least as
// complete as the local log.
if (votedFor == 0 || votedFor == message.getRequest().from) {
setVotedFor(message.getRequest().from);
lastRPC = info.currentTimeMillis();
vote = true;
}
}
LOG.debug("{} sending vote reply to {} vote = {}, voted = {}", myId, message.getRequest().from, votedFor, vote);
RequestVoteReply m = new RequestVoteReply(currentTerm, vote);
RpcReply reply = new RpcReply(m);
message.reply(reply);
}
  /**
   * Handles an AppendEntries RPC per the RAFT replication rules (sections 5.1,
   * 5.2, 5.3, 5.5 of the paper).  Replies on the request channel in every
   * path, possibly asynchronously after the local log has been repaired and
   * the new entries committed.
   */
  @FiberOnly
  private void doAppendMessage(final Request<RpcWireRequest, RpcReply> request) {
    final AppendEntries appendMessage = request.getRequest().getAppendMessage();

    // 1. return if term < currentTerm (sec 5.1)
    if (appendMessage.getTerm() < currentTerm) {
      // TODO is this the correct message reply?
      AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
      RpcReply reply = new RpcReply(m);
      request.reply(reply);
      return;
    }

    // 2. if term > currentTerm, set it (sec 5.1)
    if (appendMessage.getTerm() > currentTerm) {
      setCurrentTerm(appendMessage.getTerm());
    }

    // 3. Step down if we are a leader or a candidate (sec 5.2, 5.5)
    if (myState != State.FOLLOWER) {
      haltLeader();
    }

    // 4. reset election timeout
    lastRPC = info.currentTimeMillis();

    long theLeader = appendMessage.getLeaderId();
    if (whosLeader != theLeader) {
      // a new leader has been observed; announce it to listeners
      LOG.debug("{} discovered new leader: {}", myId, theLeader);
      whosLeader = theLeader;

      stateChangeChannel.publish(
          new ReplicatorInstanceEvent(
              ReplicatorInstanceEvent.EventType.LEADER_ELECTED,
              this,
              whosLeader,
              info.currentTimeMillis(),
              null)
      );
    }

    // 5. return failure if log doesn't contain an entry at
    // prevLogIndex who's term matches prevLogTerm (sec 5.3)
    // if msgPrevLogIndex == 0 -> special case of starting the log!
    long msgPrevLogIndex = appendMessage.getPrevLogIndex();
    long msgPrevLogTerm = appendMessage.getPrevLogTerm();
    if (msgPrevLogIndex != 0 && log.getLogTerm(msgPrevLogIndex) != msgPrevLogTerm) {
      // reply carries our last index so the leader can back up its nextIndex
      AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, log.getLastIndex());
      RpcReply reply = new RpcReply(m);
      request.reply(reply);
      return;
    }

    if (appendMessage.getEntriesList().isEmpty()) {
      // heartbeat: acknowledge and advance the commit index only
      AppendEntriesReply m = new AppendEntriesReply(currentTerm, true, 0);
      RpcReply reply = new RpcReply(m);
      request.reply(reply);
      long newCommitIndex = Math.min(appendMessage.getCommitIndex(), log.getLastIndex());
      setLastCommittedIndex(newCommitIndex);
      return;
    }

    // 6. if existing entries conflict with new entries, delete all
    // existing entries starting with first conflicting entry (sec 5.3)
    // nb: The process in which we fix the local log may involve a async log operation, so that is entirely
    // hidden up in this future.  Note that the process can fail, so we handle that as well.
    ListenableFuture<ArrayList<LogEntry>> entriesToCommitFuture = validateAndFixLocalLog(request, appendMessage);
    Futures.addCallback(entriesToCommitFuture, new FutureCallback<ArrayList<LogEntry>>() {
      @Override
      public void onSuccess(ArrayList<LogEntry> entriesToCommit) {

        // 7. Append any new entries not already in the log.
        ListenableFuture<Boolean> logCommitNotification = log.logEntries(entriesToCommit);

        // 8. apply newly committed entries to state machine

        // wait for the log to commit before returning message.  But do so async.
        Futures.addCallback(logCommitNotification, new FutureCallback<Boolean>() {
          @Override
          public void onSuccess(Boolean result) {
            AppendEntriesReply m = new AppendEntriesReply(currentTerm, true, 0);
            RpcReply reply = new RpcReply(m);
            request.reply(reply);

            // Notify and mark the last committed index.
            long newCommitIndex = Math.min(appendMessage.getCommitIndex(), log.getLastIndex());
            setLastCommittedIndex(newCommitIndex);
          }

          @Override
          public void onFailure(Throwable t) {
            // TODO A log commit failure is probably a fatal error.  Quit the instance?
            // TODO better error reporting. A log commit failure will be a serious issue.
            AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
            RpcReply reply = new RpcReply(m);
            request.reply(reply);
          }
        }, fiber);
      }

      @Override
      public void onFailure(Throwable t) {
        // local log repair failed; report failure to the leader
        AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
        RpcReply reply = new RpcReply(m);
        request.reply(reply);
      }
    }, fiber);
  }
/**
 * Kicks off the (possibly asynchronous) reconciliation of the local log against the entries
 * carried by an AppendEntries message.
 *
 * @return a future resolving to the sublist of entries that still need to be appended locally;
 *         the future fails if the message's entries cannot be reconciled with the local log.
 */
private ListenableFuture<ArrayList<LogEntry>> validateAndFixLocalLog(Request<RpcWireRequest, RpcReply> request,
                                                                     AppendEntries appendMessage) {
  SettableFuture<ArrayList<LogEntry>> result = SettableFuture.create();
  validateAndFixLocalLog0(request, appendMessage, result);
  return result;
}
/**
 * Core of step 6 of AppendEntries processing: walks the entries in the message against the local
 * log, collecting the entries we still need to append. If a local entry conflicts (same index,
 * different term) the local log is truncated from that point (an async operation), after which
 * this method re-runs itself from scratch via a fiber callback. Completes {@code future} with the
 * entries to commit, or fails it if the message skips past our last index.
 */
private void validateAndFixLocalLog0(final Request<RpcWireRequest, RpcReply> request,
                                     final AppendEntries appendMessage,
                                     final SettableFuture<ArrayList<LogEntry>> future) {
  // 6. if existing entries conflict with new entries, delete all
  // existing entries starting with first conflicting entry (sec 5.3)
  // The next index we expect to append at; recomputed fresh on each (re)run so a prior
  // truncation is naturally accounted for.
  long nextIndex = log.getLastIndex() + 1;
  List<LogEntry> entries = appendMessage.getEntriesList();
  ArrayList<LogEntry> entriesToCommit = new ArrayList<>(entries.size());
  for (LogEntry entry : entries) {
    long entryIndex = entry.getIndex();
    if (entryIndex == nextIndex) {
      // Brand-new entry extending our log; accept it.
      LOG.debug("{} new log entry for idx {} term {}", myId, entryIndex, entry.getTerm());
      entriesToCommit.add(entry);
      nextIndex++;
      continue;
    }
    if (entryIndex > nextIndex) {
      // ok this entry is still beyond the LAST entry, so we have a problem:
      LOG.error("{} log entry missing, i expected {} and the next in the message is {}",
          myId, nextIndex, entryIndex);
      future.setException(new Exception("Log entry missing"));
      return;
    }
    // at this point entryIndex should be <= log.getLastIndex
    assert entryIndex < nextIndex;
    if (log.getLogTerm(entryIndex) != entry.getTerm()) {
      // This is generally expected to be fairly uncommon. To prevent busywaiting on the truncate,
      // we basically just redo some work (that ideally shouldn't be too expensive).
      // So after this point, we basically return immediately, with a callback schedule.
      // conflict:
      LOG.debug("{} log conflict at idx {} my term: {} term from leader: {}, truncating log after this point", myId,
          entryIndex, log.getLogTerm(entryIndex), entry.getTerm());
      // delete this and all subsequent entries:
      ListenableFuture<Boolean> truncateResult = log.truncateLog(entryIndex);
      Futures.addCallback(truncateResult, new FutureCallback<Boolean>() {
        @Override
        public void onSuccess(Boolean ignored) {
          // Recurse, which involved a little redo work, but at makes this code easier to reason about.
          validateAndFixLocalLog0(request, appendMessage, future);
        }

        @Override
        public void onFailure(Throwable t) {
          failReplicatorInstance(t);
          future.setException(t); // TODO determine if this is the proper thing to do here?
        }
      }, fiber);
      return;
    } //else {
    // this log entry did NOT conflict we dont need to re-commit this entry.
    //}
  }
  future.set(entriesToCommit);
}
/**
 * Fires on the periodic election-check timer; starts a new election when no RPC has been
 * received within the (randomized) election timeout. Leaders never stand for election.
 */
@FiberOnly
private void checkOnElection() {
  if (myState == State.LEADER) {
    LOG.trace("{} leader during election check.", myId);
    return;
  }

  // Election timeout elapsed with no word from a leader or candidate -> become a candidate.
  long now = info.currentTimeMillis();
  if (now > lastRPC + this.myElectionTimeout) {
    LOG.trace("{} Timed out checkin on election, try new election", myId);
    doElection();
  }
}
/**
 * Computes the quorum majority for the given peer count: strictly more than half,
 * i.e. the integer form of ceil((peerCount + 1) / 2).
 */
private int calculateMajority(int peerCount) {
  return (peerCount / 2) + 1;
}
/**
 * Starts a new election (Raft sec 5.2): publishes an ELECTION_TIMEOUT event, increments the
 * term, becomes a candidate, and solicits votes from every peer (including itself, which is in
 * the peers list). Replies -- and retries, via RequestVoteTimeout -- funnel into
 * handleElectionReply0, which tallies votes against {@code majority}.
 */
@FiberOnly
private void doElection() {
  stateChangeChannel.publish(
      new ReplicatorInstanceEvent(
          ReplicatorInstanceEvent.EventType.ELECTION_TIMEOUT,
          this,
          0,
          info.currentTimeMillis(),
          null)
  );

  final int majority = calculateMajority(peers.size());
  // Start new election "timer".
  lastRPC = info.currentTimeMillis();
  // increment term.
  setCurrentTerm(currentTerm + 1);
  myState = State.CANDIDATE;

  RequestVote msg = new RequestVote(currentTerm, myId, log.getLastIndex(), log.getLastTerm());

  LOG.debug("{} Starting election for currentTerm: {}", myId, currentTerm);

  // Pin the term: replies are only valid for the term we held when the RPCs were sent.
  final long termBeingVotedFor = currentTerm;
  // Vote tally shared across all per-peer callbacks; mutated only on this fiber.
  final List<Long> votes = new ArrayList<>();
  for (long peer : peers) {
    RpcRequest req = new RpcRequest(peer, myId, quorumId, msg);
    AsyncRequest.withOneReply(fiber, sendRpcChannel, req,
        message -> handleElectionReply0(message, termBeingVotedFor, votes, majority),
        1, TimeUnit.SECONDS, new RequestVoteTimeout(req, termBeingVotedFor, votes, majority));
  }
}
/**
 * Timeout callback for an outstanding RequestVote RPC. Resends the vote request -- rescheduling
 * itself as the next timeout -- for as long as this instance is still a candidate for the same
 * term it was voting in when the RPC was first sent.
 */
private class RequestVoteTimeout implements Runnable {
  public final RpcRequest request;
  public final long termBeingVotedFor;
  public final List<Long> votes;
  public final int majority;

  private RequestVoteTimeout(RpcRequest request, long termBeingVotedFor, List<Long> votes, int majority) {
    this.request = request;
    this.termBeingVotedFor = termBeingVotedFor;
    this.votes = votes;
    this.majority = majority;
  }

  @Override
  public void run() {
    // If we are no longer a candidate, retrying RequestVote is pointless.
    if (myState != State.CANDIDATE) {
      return;
    }

    // Also if the term goes forward somehow, this is also out of date, and drop it.
    if (currentTerm > termBeingVotedFor) {
      LOG.trace("{} request vote timeout, current term has moved on, abandoning this request", myId);
      return;
    }

    LOG.trace("{} request vote timeout to {}, resending RPC", myId, request.to);

    // Note we are using 'this' as the recursive timeout.
    AsyncRequest.withOneReply(fiber, sendRpcChannel, request,
        reply -> handleElectionReply0(reply, termBeingVotedFor, votes, majority),
        1, TimeUnit.SECONDS, this);
  }

  @Override
  public String toString() {
    return "RequestVoteTimeout{" +
        "request=" + request +
        ", termBeingVotedFor=" + termBeingVotedFor +
        ", votes=" + votes +
        ", majority=" + majority +
        '}';
  }
}
/**
 * Processes a RequestVote reply (also invoked from RequestVoteTimeout retries). Tallies granted
 * votes and promotes this instance to leader once a majority is reached. Stale replies -- from an
 * earlier term, or arriving after the election was decided -- are ignored; a reply carrying a
 * newer term pushes our term forward instead.
 *
 * @param message           the vote reply, or null if the RPC produced no reply
 * @param termBeingVotedFor the term this instance was a candidate for when the RPC was sent
 * @param votes             accumulator of peer ids that granted their vote (fiber-confined)
 * @param majority          number of votes needed to win the election
 */
private void handleElectionReply0(RpcWireReply message, long termBeingVotedFor, List<Long> votes, int majority) {
  // if current term has advanced, these replies are stale and should be ignored:
  if (message == null) {
    LOG.warn("{} got a NULL message reply, that's unfortunate", myId);
    return;
  }

  if (currentTerm > termBeingVotedFor) {
    LOG.warn("{} election reply from {}, but currentTerm {} > vote term {}", myId, message.from,
        currentTerm, termBeingVotedFor);
    return;
  }

  // if we are no longer a Candidate, election was over, these replies are stale.
  if (myState != State.CANDIDATE) {
    // we became not, ignore
    LOG.warn("{} election reply from {} ignored -> in state {}", myId, message.from, myState);
    return;
  }

  RequestVoteReply reply = message.getRequestVoteReplyMessage();

  if (reply.getTerm() > currentTerm) {
    LOG.warn("{} election reply from {}, but term {} was not my term {}, updating currentTerm", myId,
        message.from, reply.getTerm(), currentTerm);
    setCurrentTerm(reply.getTerm());
    return;
  } else if (reply.getTerm() < currentTerm) {
    // huh weird.
    // BUGFIX: 'message.from' was missing from the argument list, so the 4 placeholders were
    // filled with only 3 arguments and every value after the first was shifted/wrong.
    LOG.warn("{} election reply from {}, their term {} < currentTerm {}", myId, message.from,
        reply.getTerm(), currentTerm);
  }

  // did you vote for me?
  if (reply.getVoteGranted()) {
    // BUGFIX: guard against double-counting -- a delayed reply plus a timeout-driven retry can
    // deliver two granted votes from the same peer, and 'votes' is a List, not a Set.
    if (!votes.contains(message.from)) {
      votes.add(message.from);
    }
  }

  if (votes.size() >= majority) {
    becomeLeader();
  }
}
//// Leader timer stuff below
private Disposable queueConsumer;
/**
 * Steps down to follower state: announces the deposition on the state-change channel and stops
 * the leader-only queue-consumer task.
 */
@FiberOnly
private void haltLeader() {
  myState = State.FOLLOWER;

  ReplicatorInstanceEvent deposedEvent = new ReplicatorInstanceEvent(
      ReplicatorInstanceEvent.EventType.LEADER_DEPOSED,
      this,
      0,
      info.currentTimeMillis(),
      null);
  stateChangeChannel.publish(deposedEvent);

  stopQueueConsumer();
}
/** Disposes the periodic queue-consumer task, if one is running. Safe to call repeatedly. */
@FiberOnly
private void stopQueueConsumer() {
  if (queueConsumer == null) {
    return;
  }
  queueConsumer.dispose();
  queueConsumer = null;
}
/**
 * Transitions this instance into the LEADER role: initializes per-peer replication bookkeeping
 * (each peer's nextIndex starts just past our own last log entry, per the Raft paper page 7,
 * paragraph 5), announces the election result on both channels, and starts the periodic
 * AppendEntries pump.
 */
private void becomeLeader() {
  // BUGFIX: corrected typos in the log message ("commece" -> "commence", "RPCz" -> "RPCs").
  LOG.warn("{} I AM THE LEADER NOW, commence AppendEntries RPCs term = {}", myId, currentTerm);

  myState = State.LEADER;
  stateMemoryChannel.publish(State.LEADER);

  // Page 7, para 5
  long myNextLog = log.getLastIndex() + 1;

  peersLastAckedIndex = new HashMap<>(peers.size());
  // Sized peers.size() - 1: we never track our own nextIndex here (see the skip in the loop).
  peersNextIndex = new HashMap<>(peers.size() - 1);
  for (long peer : peers) {
    if (peer == myId) {
      continue;
    }

    peersNextIndex.put(peer, myNextLog);
  }

  // 0 is a sentinel meaning "no entry logged during my tenure yet"; set on first leader log write.
  myFirstIndexAsLeader = 0;

  stateChangeChannel.publish(
      new ReplicatorInstanceEvent(
          ReplicatorInstanceEvent.EventType.LEADER_ELECTED,
          this,
          myId,
          info.currentTimeMillis(),
          null)
  );

  startQueueConsumer();
}
/**
 * Schedules consumeQueue() to run every groupCommitDelay milliseconds on this fiber; any
 * throwable escaping the consumer permanently fails the replicator instance.
 */
@FiberOnly
private void startQueueConsumer() {
  Runnable safeConsume = () -> {
    try {
      consumeQueue();
    } catch (Throwable t) {
      failReplicatorInstance(t);
    }
  };
  queueConsumer = fiber.scheduleAtFixedRate(safeConsume, 0, info.groupCommitDelay(), TimeUnit.MILLISECONDS);
}
/**
 * Leader-side group-commit pump, run every groupCommitDelay ms: drains pending client requests,
 * appends them to the local log, then replicates to each peer. Peers that are behind
 * (peerNextIdx < firstIndexInList) first get older entries fetched asynchronously from the log
 * and spliced in front of the new batch.
 */
@FiberOnly
private void consumeQueue() {
  // retrieve as many items as possible. send rpc.
  final List<IntLogRequest> reqs = new ArrayList<>();

  LOG.trace("{} queue consuming", myId);
  while (logRequests.peek() != null) {
    reqs.add(logRequests.poll());
  }

  LOG.trace("{} {} queue items to commit", myId, reqs.size());
  final long firstIndexInList = log.getLastIndex() + 1;
  final long lastIndexInList = firstIndexInList + reqs.size() - 1;

  List<LogEntry> newLogEntries = createLogEntriesFromIntRequests(reqs, firstIndexInList);
  leaderLogNewEntries(newLogEntries, lastIndexInList);
  assert lastIndexInList == log.getLastIndex();

  final long majority = calculateMajority(peers.size());
  for (final long peer : peers) {
    if (myId == peer) {
      continue; // dont send myself messages.
    }

    // for each peer, figure out how many "back messages" should I send:
    final long peerNextIdx = this.peersNextIndex.get(peer);

    if (peerNextIdx < firstIndexInList) {
      final long moreCount = firstIndexInList - peerNextIdx;
      // BUGFIX: corrected typo in the log message ("entires" -> "entries").
      LOG.debug("{} sending {} more log entries to peer {}", myId, moreCount, peer);

      // TODO check moreCount is reasonable, and available in log. Otherwise do alternative peer catch up
      // TODO alternative peer catchup is by a different process, send message to that then skip sending AppendRpc
      // TODO allow for smaller 'catch up' messages so we dont try to create a 400GB sized message.
      // TODO cache these extra LogEntry objects so we dont recreate too many of them.
      ListenableFuture<List<LogEntry>> peerEntriesFuture = log.getLogEntries(peerNextIdx, firstIndexInList);

      Futures.addCallback(peerEntriesFuture, new FutureCallback<List<LogEntry>>() {
        @Override
        public void onSuccess(List<LogEntry> entriesFromLog) {
          // TODO make sure the lists splice neatly together.
          assert entriesFromLog.size() == moreCount;
          if (peerNextIdx != peersNextIndex.get(peer) ||
              myState != State.LEADER) {
            // These were the same when we started checking the log, but they're not now -- that means
            // things happened while the log was retrieving, so discard this result. This is safe because
            // the next (or concurrent) run of consumeQueue has better information.
            return;
          }

          List<LogEntry> entriesToAppend = new ArrayList<>((int) (newLogEntries.size() + moreCount));
          entriesToAppend.addAll(entriesFromLog);
          entriesToAppend.addAll(newLogEntries);
          sendAppendEntries(peer, peerNextIdx, lastIndexInList, majority, entriesToAppend);
        }

        @Override
        public void onFailure(Throwable throwable) {
          // Failed to retrieve from local log
          // TODO is this situation ever recoverable?
          failReplicatorInstance(throwable);
        }
      }, fiber);
    } else {
      sendAppendEntries(peer, peerNextIdx, lastIndexInList, majority, newLogEntries);
    }
  }
}
/**
 * Converts queued client requests into LogEntry objects with consecutive indexes starting at
 * firstEntryIndex, completing each request's future with its assigned index. Also records
 * myFirstIndexAsLeader (the first index assigned during this leadership tenure) if unset.
 */
@FiberOnly
private List<LogEntry> createLogEntriesFromIntRequests(List<IntLogRequest> requests, long firstEntryIndex) {
  List<LogEntry> newLogEntries = new ArrayList<>(requests.size());

  long nextEntryIndex = firstEntryIndex;
  for (IntLogRequest logReq : requests) {
    newLogEntries.add(new LogEntry(currentTerm, nextEntryIndex, logReq.data));

    if (myFirstIndexAsLeader == 0) {
      myFirstIndexAsLeader = nextEntryIndex;
      LOG.debug("{} my first index as leader is: {}", myId, myFirstIndexAsLeader);
    }

    // let the client know what our id is
    logReq.logNumberNotifation.set(nextEntryIndex);
    nextEntryIndex++;
  }
  return newLogEntries;
}
/**
 * Writes freshly created entries to the local log (no-op if the batch is empty). On durable
 * success, records our own ack at lastIndexInList -- the leader's log write counts toward the
 * majority -- and re-evaluates the commit point.
 */
@FiberOnly
private void leaderLogNewEntries(List<LogEntry> newLogEntries, long lastIndexInList) {
  if (newLogEntries.isEmpty()) {
    return;
  }

  ListenableFuture<Boolean> localLogFuture = log.logEntries(newLogEntries);
  Futures.addCallback(localLogFuture, new FutureCallback<Boolean>() {
    @Override
    public void onSuccess(Boolean result) {
      assert result != null && result;

      peersLastAckedIndex.put(myId, lastIndexInList);
      calculateLastVisible(calculateMajority(peers.size()), lastIndexInList);
    }

    @Override
    public void onFailure(Throwable t) {
      // pretty bad.
      LOG.error("{} failed to commit to local log {}", myId, t);
    }
  }, fiber);
}
/**
 * Sends one AppendEntries RPC to a peer, then processes the reply: on rejection, backs off the
 * peer's nextIndex (Raft paper page 7, paragraph 5) so the next round resends older entries; on
 * success, records the peer's ack and re-evaluates the majority commit point. On timeout, does
 * nothing -- the next consumeQueue pass retries.
 *
 * @param peer          destination peer id (never myId)
 * @param peerNextIdx   index of the first entry in 'entries' (the peer's expected next index)
 * @param lastIndexSent index of the last entry in 'entries'
 * @param majority      ack count needed to advance the commit point
 * @param entries       contiguous entries [peerNextIdx .. lastIndexSent]; may be empty (heartbeat)
 */
@FiberOnly
private void sendAppendEntries(long peer, long peerNextIdx, long lastIndexSent, long majority,
                               final List<LogEntry> entries) {
  assert (entries.size() == 0) || (entries.get(0).getIndex() == peerNextIdx);
  assert (entries.size() == 0) || (entries.get(entries.size() - 1).getIndex() == lastIndexSent);

  // Term of the entry immediately preceding this batch; index 0 is the empty-log sentinel.
  final long prevLogIndex = peerNextIdx - 1;
  final long prevLogTerm;
  if (prevLogIndex == 0) {
    prevLogTerm = 0;
  } else {
    prevLogTerm = log.getLogTerm(prevLogIndex);
  }

  // catch them up so the next RPC wont over-send old junk.
  // (Optimistically advanced; rolled back below if the peer rejects.)
  peersNextIndex.put(peer, lastIndexSent + 1);

  AppendEntries msg = new AppendEntries(
      currentTerm, myId, prevLogIndex, prevLogTerm,
      entries,
      lastCommittedIndex
  );

  RpcRequest request = new RpcRequest(peer, myId, quorumId, msg);
  AsyncRequest.withOneReply(fiber, sendRpcChannel, request, message -> {
    LOG.trace("{} got a reply {}", myId, message);

    boolean wasSuccessful = message.getAppendReplyMessage().getSuccess();
    if (!wasSuccessful) {
      // This is per Page 7, paragraph 5. "After a rejection, the leader decrements nextIndex and retries"
      // If the peer told us its last log entry, jump straight there instead of decrementing.
      if (message.getAppendReplyMessage().getMyLastLogEntry() != 0) {
        peersNextIndex.put(peer, message.getAppendReplyMessage().getMyLastLogEntry());
      } else {
        peersNextIndex.put(peer, peerNextIdx - 1);
      }
    } else {
      // we have been successfully acked up to this point.
      LOG.trace("{} peer {} acked for {}", myId, peer, lastIndexSent);
      peersLastAckedIndex.put(peer, lastIndexSent);

      calculateLastVisible(majority, lastIndexSent);
    }
  }, 5, TimeUnit.SECONDS, () -> {
    LOG.trace("{} peer {} timed out", myId, peer);
    // Do nothing -> let next timeout handle things.
    // This timeout exists just so that we can cancel and clean up stuff in jetlang.
  });
}
/**
 * Determines the highest log index acknowledged by a majority of peers and, subject to the Raft
 * safety checks below, advances lastCommittedIndex to it.
 * NOTE(review): a peer acked at index N has implicitly acked all indexes &lt;= N, but this
 * histogram counts only exact values (the code itself warns when more than one bucket reaches
 * majority) -- confirm this under-count is intended.
 */
private void calculateLastVisible(long majority, long lastIndexSent) {
  if (lastIndexSent == lastCommittedIndex) {
    return; //skip null check basically
  }

  // Histogram: how many peers' latest ack is exactly this index?
  HashMap<Long, Integer> bucket = new HashMap<>();
  for (long lastAcked : peersLastAckedIndex.values()) {
    bucket.merge(lastAcked, 1, Integer::sum);
  }

  long mostAcked = 0;
  for (Map.Entry<Long, Integer> e : bucket.entrySet()) {
    if (e.getValue() >= majority) {
      if (mostAcked != 0) {
        LOG.warn("{} strange, found more than 1 'most acked' entry: {} and {}", myId, mostAcked, e.getKey());
      }
      mostAcked = e.getKey();
    }
  }
  if (mostAcked == 0) {
    return;
  }
  if (myFirstIndexAsLeader == 0) {
    return; // cant declare new visible yet until we have a first index as the leader.
  }

  // Raft safety: only entries from the current leadership tenure may be committed by counting.
  if (mostAcked < myFirstIndexAsLeader) {
    LOG.warn("{} Found most-acked entry {} but my first index as leader was {}, cant declare visible yet", myId, mostAcked, myFirstIndexAsLeader);
    return;
  }

  if (mostAcked < lastCommittedIndex) {
    LOG.warn("{} weird mostAcked {} is smaller than lastCommittedIndex {}", myId, mostAcked, lastCommittedIndex);
    return;
  }
  if (mostAcked == lastCommittedIndex) {
    return;
  }

  setLastCommittedIndex(mostAcked);
  LOG.trace("{} discovered new visible entry {}", myId, lastCommittedIndex);
  // TODO take action and notify clients (pending new system frameworks)
}
/**
 * Monotonically advances lastCommittedIndex, publishing a commit notice on change.
 * An attempted regression is logged and ignored; an equal value is a no-op.
 */
private void setLastCommittedIndex(long newLastCommittedIndex) {
  if (newLastCommittedIndex < lastCommittedIndex) {
    LOG.warn("{} New lastCommittedIndex {} is smaller than previous lastCommittedIndex {}", myId, newLastCommittedIndex, lastCommittedIndex);
    return;
  }
  if (newLastCommittedIndex > lastCommittedIndex) {
    lastCommittedIndex = newLastCommittedIndex;
    notifyLastCommitted();
  }
}
/** Publishes an IndexCommitNotice carrying the current lastCommittedIndex to listeners. */
private void notifyLastCommitted() {
  commitNoticeChannel.publish(new ReplicationModule.IndexCommitNotice(this, lastCommittedIndex));
}
/**
 * Durably records our vote for the given peer in the current term, then updates the in-memory
 * copy. A persistence failure is fatal to the instance (via failReplicatorInstance).
 */
private void setVotedFor(long votedFor) {
  try {
    persister.writeCurrentTermAndVotedFor(quorumId, currentTerm, votedFor);
  } catch (IOException e) {
    failReplicatorInstance(e);
  }

  this.votedFor = votedFor;
}
/**
 * Durably records the new term with votedFor cleared (a vote belongs to exactly one term), then
 * updates the in-memory copies. A persistence failure is fatal to the instance.
 */
private void setCurrentTerm(long newTerm) {
  try {
    persister.writeCurrentTermAndVotedFor(quorumId, newTerm, 0);
  } catch (IOException e) {
    failReplicatorInstance(e);
  }

  this.currentTerm = newTerm;
  this.votedFor = 0;
}
/** @return this replicator's own node id. */
@Override
public long getId() {
  return myId;
}
/** Stops this replicator's fiber; the instance cannot be restarted afterwards. */
public void dispose() {
  fiber.dispose();
}
/** @return true iff this instance currently believes itself to be the quorum leader. */
@Override
public boolean isLeader() {
  return myState == State.LEADER;
}
/** Starts the underlying fiber, enabling RPC processing and the election-check timer. */
@Override
public void start() {
  LOG.debug("{} started {} with election timeout {}", myId, this.quorumId, this.myElectionTimeout);
  fiber.start();
}
/** @return channel publishing this instance's state transitions (FOLLOWER/CANDIDATE/LEADER). */
@Override
public Channel<State> getStateChannel() {
  return stateMemoryChannel;
}
}
|
c5db/src/main/java/c5db/replication/ReplicatorInstance.java
|
/*
* Copyright (C) 2014 Ohm Data
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package c5db.replication;
import c5db.interfaces.ReplicationModule;
import c5db.log.ReplicatorLog;
import c5db.replication.generated.AppendEntries;
import c5db.replication.generated.AppendEntriesReply;
import c5db.replication.generated.LogEntry;
import c5db.replication.generated.RequestVote;
import c5db.replication.generated.RequestVoteReply;
import c5db.replication.rpc.RpcReply;
import c5db.replication.rpc.RpcRequest;
import c5db.replication.rpc.RpcWireReply;
import c5db.replication.rpc.RpcWireRequest;
import c5db.util.FiberOnly;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import org.jetlang.channels.AsyncRequest;
import org.jetlang.channels.Channel;
import org.jetlang.channels.MemoryChannel;
import org.jetlang.channels.MemoryRequestChannel;
import org.jetlang.channels.Request;
import org.jetlang.channels.RequestChannel;
import org.jetlang.core.Disposable;
import org.jetlang.fibers.Fiber;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import static c5db.interfaces.ReplicationModule.ReplicatorInstanceEvent;
/**
* Single instantiation of a replicator / log / lease. This implementation's logic is based on the
* RAFT algorithm (see <a href="http://raftconsensus.github.io/">http://raftconsensus.github.io/</a>.
* <p/>
* A ReplicatorInstance handles the consensus and replication for a single quorum, and communicates
* with the log package via {@link c5db.log.ReplicatorLog}.
*/
public class ReplicatorInstance implements ReplicationModule.Replicator {
private static final Logger LOG = LoggerFactory.getLogger(ReplicatorInstance.class);
/** Diagnostic summary of this replicator's identity and volatile consensus state. */
@Override
public String toString() {
  return "ReplicatorInstance{" +
      "myId=" + myId +
      ", quorumId='" + quorumId + '\'' +
      ", lastCommittedIndex=" + lastCommittedIndex +
      ", myState=" + myState +
      ", currentTerm=" + currentTerm +
      ", votedFor=" + votedFor +
      ", lastRPC=" + lastRPC +
      '}';
}
private final MemoryChannel<State> stateMemoryChannel = new MemoryChannel<>();
/** @return the channel on which this instance receives RPCs (RequestVote / AppendEntries) from peers. */
public RequestChannel<RpcWireRequest, RpcReply> getIncomingChannel() {
  return incomingChannel;
}
private final RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel;
private final RequestChannel<RpcWireRequest, RpcReply> incomingChannel = new MemoryRequestChannel<>();
private final Channel<ReplicatorInstanceEvent> stateChangeChannel;
private final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel;
/**
* ******* final fields ************
*/
private final Fiber fiber;
private final long myId;
private final String quorumId;
private final ImmutableList<Long> peers;
/**
* *** These next few fields are used when we are a leader ******
*/
// this is the next index from our log we need to send to each peer, kept track of on a per-peer basis.
private HashMap<Long, Long> peersNextIndex;
// The last succesfully acked message from our peers. I also keep track of my own acked log messages in here.
private HashMap<Long, Long> peersLastAckedIndex;
private long myFirstIndexAsLeader;
private long lastCommittedIndex;
/** @return the id of the quorum this replicator participates in. */
@Override
public String getQuorumId() {
  return quorumId;
}
/**
 * Internal pairing of a client's to-be-logged data with the future used to hand back the log
 * index assigned to it once the leader drains the request queue.
 */
private static class IntLogRequest {
  // The client payload to replicate.
  public final List<ByteBuffer> data;
  // Completed with the assigned log index (field name is a pre-existing typo of "notification").
  public final SettableFuture<Long> logNumberNotifation;

  private IntLogRequest(List<ByteBuffer> data) {
    this.data = data;
    this.logNumberNotifation = SettableFuture.create();
  }
}
private final BlockingQueue<IntLogRequest> logRequests = new ArrayBlockingQueue<>(100);
State myState = State.FOLLOWER;
// In theory these are persistent:
long currentTerm;
long votedFor;
// Election timers, etc.
private long lastRPC;
private long myElectionTimeout;
private long whosLeader = 0;
private Disposable electionChecker;
private final ReplicatorLog log;
final ReplicatorInformationInterface info;
final ReplicatorInfoPersistence persister;
/**
 * Standard constructor: wires the replicator to its quorum, log, clock/info source, and
 * channels, randomizes the election timeout (in [electionTimeout, 2*electionTimeout) ticks),
 * reads durable state on the fiber, and schedules the periodic election check. The fiber is NOT
 * started here -- callers must invoke {@link #start()}.
 */
public ReplicatorInstance(final Fiber fiber,
                          final long myId,
                          final String quorumId,
                          List<Long> peers,
                          ReplicatorLog log,
                          ReplicatorInformationInterface info,
                          ReplicatorInfoPersistence persister,
                          RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel,
                          final Channel<ReplicatorInstanceEvent> stateChangeChannel,
                          final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel) {
  this.fiber = fiber;
  this.myId = myId;
  this.quorumId = quorumId;
  this.peers = ImmutableList.copyOf(peers);
  this.sendRpcChannel = sendRpcChannel;
  this.log = log;
  this.info = info;
  this.persister = persister;
  this.stateChangeChannel = stateChangeChannel;
  this.commitNoticeChannel = commitNoticeChannel;
  Random r = new Random();
  // Randomized per-instance timeout to reduce the chance of split elections (Raft sec 5.2).
  this.myElectionTimeout = r.nextInt((int) info.electionTimeout()) + info.electionTimeout();
  this.lastRPC = info.currentTimeMillis();
  this.lastCommittedIndex = 0;

  assert this.peers.contains(this.myId);

  fiber.execute(() -> {
    try {
      readPersistentData();
      // indicate we are running!
      stateChangeChannel.publish(
          new ReplicatorInstanceEvent(
              ReplicatorInstanceEvent.EventType.QUORUM_START,
              ReplicatorInstance.this,
              0,
              info.currentTimeMillis(),
              null)
      );
    } catch (IOException e) {
      LOG.error("{} {} error during persistent data init {}", quorumId, myId, e);
      failReplicatorInstance(e);
    }
  });

  incomingChannel.subscribe(fiber, this::onIncomingMessage);
  electionChecker = fiber.scheduleWithFixedDelay(this::checkOnElection, info.electionCheckRate(),
      info.electionCheckRate(), TimeUnit.MILLISECONDS);

  LOG.debug("{} primed {}", myId, this.quorumId);
}
/**
 * Test-only constructor: initializes the instance directly into the specified consensus state
 * (term, role, commit index, known leader, vote) and persists term/votedFor immediately. Uses a
 * fixed (non-randomized) election timeout for determinism. If started as LEADER, immediately
 * runs the becomeLeader() transition.
 */
ReplicatorInstance(final Fiber fiber,
                   final long myId,
                   final String quorumId,
                   List<Long> peers,
                   ReplicatorLog log,
                   ReplicatorInformationInterface info,
                   ReplicatorInfoPersistence persister,
                   RequestChannel<RpcRequest, RpcWireReply> sendRpcChannel,
                   final Channel<ReplicatorInstanceEvent> stateChangeChannel,
                   final Channel<ReplicationModule.IndexCommitNotice> commitNoticeChannel,
                   long term,
                   State state,
                   long lastCommittedIndex,
                   long leaderId,
                   long votedFor) {

  this.fiber = fiber;
  this.myId = myId;
  this.quorumId = quorumId;
  this.peers = ImmutableList.copyOf(peers);
  this.sendRpcChannel = sendRpcChannel;
  this.log = log;
  this.info = info;
  this.persister = persister;
  this.stateChangeChannel = stateChangeChannel;
  this.commitNoticeChannel = commitNoticeChannel;
  this.myElectionTimeout = info.electionTimeout();
  this.lastRPC = info.currentTimeMillis();

  // Sanity: we must be a member, and any recorded vote/leader must be a member (0 = "none").
  assert this.peers.contains(this.myId);
  assert votedFor == 0 || this.peers.contains(votedFor);
  assert leaderId == 0 || this.peers.contains(leaderId);

  incomingChannel.subscribe(fiber, this::onIncomingMessage);

  electionChecker = fiber.scheduleWithFixedDelay(this::checkOnElection,
      info.electionCheckRate(), info.electionCheckRate(), TimeUnit.MILLISECONDS);

  LOG.debug("{} primed {}", myId, this.quorumId);

  this.currentTerm = term;
  this.myState = state;
  this.lastCommittedIndex = lastCommittedIndex;
  this.whosLeader = leaderId;
  this.votedFor = votedFor;

  try {
    persister.writeCurrentTermAndVotedFor(quorumId, currentTerm, votedFor);
  } catch (IOException e) {
    failReplicatorInstance(e);
  }

  if (state == State.LEADER) {
    becomeLeader();
  }
}
/**
 * Permanently shuts down this replicator after an unrecoverable error: publishes a
 * QUORUM_FAILURE event carrying the cause, then disposes the fiber so no further work runs.
 */
void failReplicatorInstance(Throwable e) {
  stateChangeChannel.publish(
      new ReplicatorInstanceEvent(
          ReplicatorInstanceEvent.EventType.QUORUM_FAILURE,
          this,
          0,
          info.currentTimeMillis(),
          e)
  );
  fiber.dispose(); // kill us forever.
}
// public API:
/**
 * Queues client data for replication; leader only.
 *
 * @return a future completed with the log index assigned to this data, or null if this instance
 *         is not currently the leader (callers must check for null).
 * @throws InterruptedException if interrupted while waiting for space in the bounded queue.
 */
@Override
public ListenableFuture<Long> logData(List<ByteBuffer> data) throws InterruptedException {
  if (!isLeader()) {
    LOG.debug("{} attempted to logData on a non-leader", myId);
    return null;
  }

  IntLogRequest req = new IntLogRequest(data);
  // Blocks if the bounded queue (capacity 100) is full.
  logRequests.put(req);

  // TODO return the durable notification future?
  return req.logNumberNotifation;
}
/** Loads the durable Raft state (currentTerm, votedFor) for this quorum from the persister. */
@FiberOnly
private void readPersistentData() throws IOException {
  currentTerm = persister.readCurrentTerm(quorumId);
  votedFor = persister.readVotedFor(quorumId);
}
/** Fiber-side dispatcher for incoming RPCs: routes to vote or append handling by message type. */
@FiberOnly
private void onIncomingMessage(Request<RpcWireRequest, RpcReply> message) {
  RpcWireRequest req = message.getRequest();
  if (req.isRequestVoteMessage()) {
    doRequestVote(message);
    return;
  }
  if (req.isAppendMessage()) {
    doAppendMessage(message);
    return;
  }
  // Unknown message type: log and drop (no reply is sent).
  LOG.warn("{} Got a message of protobuf type I dont know: {}", myId, req);
}
/**
 * Handles an incoming RequestVote RPC (Raft sec 5.1, 5.2, 5.4): rejects stale terms, steps down
 * on newer terms, and grants our vote iff we have not voted for someone else this term and the
 * candidate's log is at least as complete as ours. Always replies.
 */
@FiberOnly
private void doRequestVote(Request<RpcWireRequest, RpcReply> message) {
  RequestVote msg = message.getRequest().getRequestVoteMessage();

  // 1. Return if term < currentTerm (sec 5.1)
  if (msg.getTerm() < currentTerm) {
    RequestVoteReply m = new RequestVoteReply(currentTerm, false);
    RpcReply reply = new RpcReply(m);
    message.reply(reply);
    return;
  }

  // 2. if term > currentTerm, currentTerm <- term
  if (msg.getTerm() > currentTerm) {
    LOG.debug("{} requestVote rpc, pushing forward currentTerm {} to {}", myId, currentTerm, msg.getTerm());
    setCurrentTerm(msg.getTerm());

    // 2a. Step down if candidate or leader.
    if (myState != State.FOLLOWER) {
      LOG.debug("{} stepping down to follower, currentTerm: {}", myId, currentTerm);
      haltLeader();
    }
  }

  // 3. if votedFor is null (0), or candidateId, and candidate's log
  // is at least as complete as local log (sec 5.2, 5.4), grant vote
  // and reset election timeout.
  boolean vote = false;
  if ((log.getLastTerm() <= msg.getLastLogTerm())
      &&
      log.getLastIndex() <= msg.getLastLogIndex()) {
    // we can vote for this because the candidate's log is at least as
    // complete as the local log.
    if (votedFor == 0 || votedFor == message.getRequest().from) {
      setVotedFor(message.getRequest().from);
      lastRPC = info.currentTimeMillis();
      vote = true;
    }
  }

  // BUGFIX: the argument order did not match the labels -- 'votedFor' was logged under
  // "vote =" and 'vote' under "voted =". Labels and arguments now agree.
  LOG.debug("{} sending vote reply to {} vote = {}, votedFor = {}", myId, message.getRequest().from, vote, votedFor);

  RequestVoteReply m = new RequestVoteReply(currentTerm, vote);
  RpcReply reply = new RpcReply(m);
  message.reply(reply);
}
/**
 * Handles an incoming AppendEntries RPC (heartbeat or replication) following the numbered steps
 * of the Raft paper: term checks, stepping down, leader discovery, log-consistency check, then
 * (asynchronously) reconciling and appending entries before replying and advancing the commit
 * index. When entries are present, the reply is deferred until the async log work resolves.
 */
@FiberOnly
private void doAppendMessage(final Request<RpcWireRequest, RpcReply> request) {
  final AppendEntries appendMessage = request.getRequest().getAppendMessage();

  // 1. return if term < currentTerm (sec 5.1)
  if (appendMessage.getTerm() < currentTerm) {
    // TODO is this the correct message reply?
    AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
    RpcReply reply = new RpcReply(m);
    request.reply(reply);
    return;
  }

  // 2. if term > currentTerm, set it (sec 5.1)
  if (appendMessage.getTerm() > currentTerm) {
    setCurrentTerm(appendMessage.getTerm());
  }

  // 3. Step down if we are a leader or a candidate (sec 5.2, 5.5)
  if (myState != State.FOLLOWER) {
    haltLeader();
  }

  // 4. reset election timeout
  lastRPC = info.currentTimeMillis();

  long theLeader = appendMessage.getLeaderId();
  if (whosLeader != theLeader) {
    // Leader changed: remember it and announce the (externally observed) election result.
    LOG.debug("{} discovered new leader: {}", myId, theLeader);
    whosLeader = theLeader;

    stateChangeChannel.publish(
        new ReplicatorInstanceEvent(
            ReplicatorInstanceEvent.EventType.LEADER_ELECTED,
            this,
            whosLeader,
            info.currentTimeMillis(),
            null)
    );
  }

  // 5. return failure if log doesn't contain an entry at
  // prevLogIndex who's term matches prevLogTerm (sec 5.3)
  // if msgPrevLogIndex == 0 -> special case of starting the log!
  long msgPrevLogIndex = appendMessage.getPrevLogIndex();
  long msgPrevLogTerm = appendMessage.getPrevLogTerm();
  if (msgPrevLogIndex != 0 && log.getLogTerm(msgPrevLogIndex) != msgPrevLogTerm) {
    // Reply carries our last index so the leader can back up efficiently.
    AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, log.getLastIndex());
    RpcReply reply = new RpcReply(m);
    request.reply(reply);
    return;
  }

  // Pure heartbeat (no entries): ack immediately and advance the commit index.
  if (appendMessage.getEntriesList().isEmpty()) {
    AppendEntriesReply m = new AppendEntriesReply(currentTerm, true, 0);
    RpcReply reply = new RpcReply(m);
    request.reply(reply);
    long newCommitIndex = Math.min(appendMessage.getCommitIndex(), log.getLastIndex());
    setLastCommittedIndex(newCommitIndex);
    return;
  }

  // 6. if existing entries conflict with new entries, delete all
  // existing entries starting with first conflicting entry (sec 5.3)
  // nb: The process in which we fix the local log may involve a async log operation, so that is entirely
  // hidden up in this future. Note that the process can fail, so we handle that as well.
  ListenableFuture<ArrayList<LogEntry>> entriesToCommitFuture = validateAndFixLocalLog(request, appendMessage);
  Futures.addCallback(entriesToCommitFuture, new FutureCallback<ArrayList<LogEntry>>() {
    @Override
    public void onSuccess(ArrayList<LogEntry> entriesToCommit) {

      // 7. Append any new entries not already in the log.
      ListenableFuture<Boolean> logCommitNotification = log.logEntries(entriesToCommit);

      // 8. apply newly committed entries to state machine
      // wait for the log to commit before returning message. But do so async.
      Futures.addCallback(logCommitNotification, new FutureCallback<Boolean>() {
        @Override
        public void onSuccess(Boolean result) {
          AppendEntriesReply m = new AppendEntriesReply(currentTerm, true, 0);
          RpcReply reply = new RpcReply(m);
          request.reply(reply);

          // Notify and mark the last committed index.
          long newCommitIndex = Math.min(appendMessage.getCommitIndex(), log.getLastIndex());
          setLastCommittedIndex(newCommitIndex);
        }

        @Override
        public void onFailure(Throwable t) {
          // TODO A log commit failure is probably a fatal error. Quit the instance?
          // TODO better error reporting. A log commit failure will be a serious issue.
          AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
          RpcReply reply = new RpcReply(m);
          request.reply(reply);
        }
      }, fiber);
    }

    @Override
    public void onFailure(Throwable t) {
      // Reconciliation failed (e.g. a gap in the entries): reject so the leader backs up.
      AppendEntriesReply m = new AppendEntriesReply(currentTerm, false, 0);
      RpcReply reply = new RpcReply(m);
      request.reply(reply);
    }
  }, fiber);
}
/**
 * Validates the incoming AppendEntries message against the local log and repairs any
 * conflicting suffix (the repair may involve an asynchronous truncate). The returned
 * future yields the entries that still need to be appended locally, or fails if the
 * log cannot be reconciled (e.g. a gap in the entry sequence).
 */
private ListenableFuture<ArrayList<LogEntry>> validateAndFixLocalLog(Request<RpcWireRequest, RpcReply> request,
AppendEntries appendMessage) {
final SettableFuture<ArrayList<LogEntry>> future = SettableFuture.create();
validateAndFixLocalLog0(request, appendMessage, future);
return future;
}
/**
 * Worker for validateAndFixLocalLog. Walks the message's entries against the local log:
 * entries directly after our last index are collected for appending; a gap fails the
 * future; a term conflict truncates the log asynchronously and then re-runs this whole
 * validation from scratch. On success the future is completed with the entries to append.
 */
private void validateAndFixLocalLog0(final Request<RpcWireRequest, RpcReply> request,
final AppendEntries appendMessage,
final SettableFuture<ArrayList<LogEntry>> future) {
// 6. if existing entries conflict with new entries, delete all
// existing entries starting with first conflicting entry (sec 5.3)
long nextIndex = log.getLastIndex() + 1;
List<LogEntry> entries = appendMessage.getEntriesList();
ArrayList<LogEntry> entriesToCommit = new ArrayList<>(entries.size());
for (LogEntry entry : entries) {
long entryIndex = entry.getIndex();
if (entryIndex == nextIndex) {
// Brand-new entry directly following our last one: collect it for appending.
LOG.debug("{} new log entry for idx {} term {}", myId, entryIndex, entry.getTerm());
entriesToCommit.add(entry);
nextIndex++;
continue;
}
if (entryIndex > nextIndex) {
// ok this entry is still beyond the LAST entry, so we have a problem:
LOG.error("{} log entry missing, i expected {} and the next in the message is {}",
myId, nextIndex, entryIndex);
future.setException(new Exception("Log entry missing"));
return;
}
// at this point entryIndex should be <= log.getLastIndex
assert entryIndex < nextIndex;
if (log.getLogTerm(entryIndex) != entry.getTerm()) {
// This is generally expected to be fairly uncommon. To prevent busywaiting on the truncate,
// we basically just redo some work (that ideally shouldn't be too expensive).
// So after this point, we basically return immediately, with a callback schedule.
// conflict:
LOG.debug("{} log conflict at idx {} my term: {} term from leader: {}, truncating log after this point", myId,
entryIndex, log.getLogTerm(entryIndex), entry.getTerm());
// delete this and all subsequent entries:
ListenableFuture<Boolean> truncateResult = log.truncateLog(entryIndex);
Futures.addCallback(truncateResult, new FutureCallback<Boolean>() {
@Override
public void onSuccess(Boolean ignored) {
// Recurse, which involved a little redo work, but at makes this code easier to reason about.
validateAndFixLocalLog0(request, appendMessage, future);
}
@Override
public void onFailure(Throwable t) {
failReplicatorInstance(t);
future.setException(t); // TODO determine if this is the proper thing to do here?
}
}, fiber);
return;
} //else {
// this log entry did NOT conflict we dont need to re-commit this entry.
//}
}
future.set(entriesToCommit);
}
/**
 * Periodic election check: if we are not the leader and no RPC has been seen within the
 * election timeout window, start a new election.
 */
@FiberOnly
private void checkOnElection() {
if (myState == State.LEADER) {
LOG.trace("{} leader during election check.", myId);
return;
}
// lastRPC is refreshed whenever we hear from a leader/candidate; silence -> election.
if (lastRPC + this.myElectionTimeout < info.currentTimeMillis()) {
LOG.trace("{} Timed out checkin on election, try new election", myId);
doElection();
}
}
/**
 * Number of votes/acks that constitutes a majority of {@code peerCount} participants,
 * i.e. ceil((peerCount + 1) / 2).
 */
private int calculateMajority(int peerCount) {
  double half = (peerCount + 1) / 2.0;
  return (int) Math.ceil(half);
}
/**
 * Starts a new election: publishes an ELECTION_TIMEOUT event, bumps the term, moves to
 * CANDIDATE, and sends RequestVote to every peer. Replies (and retries, via
 * RequestVoteTimeout) funnel into handleElectionReply0, which promotes us on a majority.
 * NOTE(review): the candidate does not appear to record a vote for itself here
 * (neither setVotedFor nor an initial entry in 'votes') -- confirm that is intended.
 */
@FiberOnly
private void doElection() {
stateChangeChannel.publish(
new ReplicatorInstanceEvent(
ReplicatorInstanceEvent.EventType.ELECTION_TIMEOUT,
this,
0,
info.currentTimeMillis(),
null)
);
final int majority = calculateMajority(peers.size());
// Start new election "timer".
lastRPC = info.currentTimeMillis();
// increment term.
setCurrentTerm(currentTerm + 1);
myState = State.CANDIDATE;
RequestVote msg = new RequestVote(currentTerm, myId, log.getLastIndex(), log.getLastTerm());
LOG.debug("{} Starting election for currentTerm: {}", myId, currentTerm);
final long termBeingVotedFor = currentTerm;
final List<Long> votes = new ArrayList<>();
for (long peer : peers) {
RpcRequest req = new RpcRequest(peer, myId, quorumId, msg);
// One-shot ask with a 1-second timeout; RequestVoteTimeout resends on expiry.
AsyncRequest.withOneReply(fiber, sendRpcChannel, req,
message -> handleElectionReply0(message, termBeingVotedFor, votes, majority),
1, TimeUnit.SECONDS, new RequestVoteTimeout(req, termBeingVotedFor, votes, majority));
}
}
/**
 * Retry callback fired when a RequestVote RPC receives no reply within its timeout.
 * Resends the same RequestVote (re-registering itself as the next timeout) for as long
 * as this node is still a CANDIDATE in the term being voted for.
 */
private class RequestVoteTimeout implements Runnable {
public final RpcRequest request;
public final long termBeingVotedFor;
public final List<Long> votes;
public final int majority;
@Override
public String toString() {
return "RequestVoteTimeout{" +
"request=" + request +
", termBeingVotedFor=" + termBeingVotedFor +
", votes=" + votes +
", majority=" + majority +
'}';
}
private RequestVoteTimeout(RpcRequest request, long termBeingVotedFor, List<Long> votes, int majority) {
this.request = request;
this.termBeingVotedFor = termBeingVotedFor;
this.votes = votes;
this.majority = majority;
}
@Override
public void run() {
// If we are no longer a candidate, retrying RequestVote is pointless.
if (myState != State.CANDIDATE) {
return;
}
// Also if the term goes forward somehow, this is also out of date, and drop it.
if (currentTerm > termBeingVotedFor) {
LOG.trace("{} request vote timeout, current term has moved on, abandoning this request", myId);
return;
}
LOG.trace("{} request vote timeout to {}, resending RPC", myId, request.to);
// Note we are using 'this' as the recursive timeout.
AsyncRequest.withOneReply(fiber, sendRpcChannel, request,
message -> handleElectionReply0(message, termBeingVotedFor, votes, majority),
1, TimeUnit.SECONDS, this);
}
}
/**
 * Handles a RequestVote reply (also invoked from the retry path). Stale replies are
 * ignored: null messages, replies for an older vote term, or replies arriving after this
 * node stopped being a candidate. A reply carrying a newer term advances currentTerm.
 * Granted votes accumulate in {@code votes}; a majority triggers becomeLeader().
 */
private void handleElectionReply0(RpcWireReply message, long termBeingVotedFor, List<Long> votes, int majority) {
// if current term has advanced, these replies are stale and should be ignored:
if (message == null) {
LOG.warn("{} got a NULL message reply, that's unfortunate", myId);
return;
}
if (currentTerm > termBeingVotedFor) {
LOG.warn("{} election reply from {}, but currentTerm {} > vote term {}", myId, message.from,
currentTerm, termBeingVotedFor);
return;
}
// if we are no longer a Candidate, the election is over; these replies are stale.
if (myState != State.CANDIDATE) {
LOG.warn("{} election reply from {} ignored -> in state {}", myId, message.from, myState);
return;
}
RequestVoteReply reply = message.getRequestVoteReplyMessage();
if (reply.getTerm() > currentTerm) {
LOG.warn("{} election reply from {}, but term {} was not my term {}, updating currentTerm", myId,
message.from, reply.getTerm(), currentTerm);
setCurrentTerm(reply.getTerm());
return;
} else if (reply.getTerm() < currentTerm) {
// huh weird.
// BUGFIX: the original log call had four '{}' placeholders but only three arguments
// (message.from was missing), shifting every value in the output by one.
LOG.warn("{} election reply from {}, their term {} < currentTerm {}", myId, message.from,
reply.getTerm(), currentTerm);
}
// did you vote for me?
if (reply.getVoteGranted()) {
votes.add(message.from);
}
if (votes.size() >= majority) {
becomeLeader();
}
}
//// Leader timer stuff below
private Disposable queueConsumer;
/**
 * Steps down from leadership: reverts to FOLLOWER, publishes a LEADER_DEPOSED event,
 * and stops the periodic replication-tick consumer.
 */
@FiberOnly
private void haltLeader() {
myState = State.FOLLOWER;
stateChangeChannel.publish(
new ReplicatorInstanceEvent(
ReplicatorInstanceEvent.EventType.LEADER_DEPOSED,
this,
0,
info.currentTimeMillis(),
null)
);
stopQueueConsumer();
}
/**
 * Cancels the leader's replication-tick subscription, if one is active, and forgets it.
 * Safe to call when no consumer is running.
 */
@FiberOnly
private void stopQueueConsumer() {
  if (queueConsumer == null) {
    return;
  }
  queueConsumer.dispose();
  queueConsumer = null;
}
/**
 * Transitions this node to LEADER for the current term: publishes the state change,
 * resets per-peer bookkeeping (nextIndex for every follower starts at our last log
 * index + 1, per the Raft paper), clears myFirstIndexAsLeader (set again by the first
 * replication batch), announces LEADER_ELECTED, and starts the replication tick.
 */
private void becomeLeader() {
// BUGFIX: corrected typos in the log message ("commece ... RPCz" -> "commence ... RPCs").
LOG.warn("{} I AM THE LEADER NOW, commence AppendEntries RPCs term = {}", myId, currentTerm);
myState = State.LEADER;
stateMemoryChannel.publish(State.LEADER);
// Page 7, para 5
long myNextLog = log.getLastIndex() + 1;
peersLastAckedIndex = new HashMap<>(peers.size());
// Capacity hint made consistent with peersLastAckedIndex (was peers.size() - 1).
peersNextIndex = new HashMap<>(peers.size());
for (long peer : peers) {
if (peer == myId) {
continue;
}
peersNextIndex.put(peer, myNextLog);
}
// none so far!
myFirstIndexAsLeader = 0;
stateChangeChannel.publish(
new ReplicatorInstanceEvent(
ReplicatorInstanceEvent.EventType.LEADER_ELECTED,
this,
myId,
info.currentTimeMillis(),
null)
);
startQueueConsumer();
}
/**
 * Schedules the leader's replication tick: consumeQueue runs immediately and then every
 * groupCommitDelay milliseconds. Any throwable escaping a tick fails the whole
 * replicator instance rather than silently killing the schedule.
 */
@FiberOnly
private void startQueueConsumer() {
queueConsumer = fiber.scheduleAtFixedRate(() -> {
try {
consumeQueue();
} catch (Throwable t) {
failReplicatorInstance(t);
}
}, 0, info.groupCommitDelay(), TimeUnit.MILLISECONDS);
}
/**
 * Leader replication tick: drains all pending client log requests, appends them to the
 * local log, and sends AppendEntries to every peer (prepending catch-up entries for
 * laggards). Decomposed into buildLogEntries() and sendEntriesToPeer() to keep each
 * piece independently readable; behavior is unchanged. Two unused locals from the
 * original (logLastIndex/logLastTerm, captured but never read) were removed.
 */
@FiberOnly
private void consumeQueue() {
// retrieve as many items as possible. send rpc.
final ArrayList<IntLogRequest> reqs = new ArrayList<>();
LOG.trace("{} queue consuming", myId);
while (logRequests.peek() != null) {
reqs.add(logRequests.poll());
}
LOG.trace("{} {} queue items to commit", myId, reqs.size());
final long firstInList = log.getLastIndex() + 1;
// Build the log entries; also assigns indexes and notifies the waiting clients.
final ArrayList<LogEntry> newLogEntries = buildLogEntries(reqs, firstInList);
// Should throw immediately if there was a basic validation error.
final ListenableFuture<Boolean> localLogFuture;
if (!newLogEntries.isEmpty()) {
localLogFuture = log.logEntries(newLogEntries);
} else {
localLogFuture = null;
}
final long lastIndexSent = log.getLastIndex();
assert lastIndexSent == firstInList + newLogEntries.size() - 1;
// What a majority means at this moment (in case of reconfigures)
final long majority = calculateMajority(peers.size());
if (localLogFuture != null) {
Futures.addCallback(localLogFuture, new FutureCallback<Boolean>() {
@Override
public void onSuccess(Boolean result) {
assert result != null && result;
// Our own successful log write counts as an ack toward the majority.
peersLastAckedIndex.put(myId, lastIndexSent);
calculateLastVisible(majority, lastIndexSent);
}
@Override
public void onFailure(Throwable t) {
// pretty bad.
LOG.error("{} failed to commit to local log {}", myId, t);
}
}, fiber);
}
for (final long peer : peers) {
if (myId == peer) {
continue; // dont send myself messages.
}
sendEntriesToPeer(peer, firstInList, lastIndexSent, majority, newLogEntries);
}
}

/**
 * Builds LogEntry objects for the drained client requests, assigning consecutive
 * indexes starting at firstIndex. Records myFirstIndexAsLeader on the first entry
 * created during this leadership, and notifies each client of its assigned index.
 */
private ArrayList<LogEntry> buildLogEntries(List<IntLogRequest> reqs, long firstIndex) {
ArrayList<LogEntry> newLogEntries = new ArrayList<>(reqs.size());
long idAssigner = firstIndex;
for (IntLogRequest logReq : reqs) {
newLogEntries.add(new LogEntry(currentTerm, idAssigner, logReq.data));
if (myFirstIndexAsLeader == 0) {
myFirstIndexAsLeader = idAssigner;
LOG.debug("{} my first index as leader is: {}", myId, myFirstIndexAsLeader);
}
// let the client know what our id is
logReq.logNumberNotifation.set(idAssigner);
idAssigner++;
}
return newLogEntries;
}

/**
 * Sends the new batch to one peer. If the peer is behind (its nextIndex predates this
 * batch), the missing older entries are fetched asynchronously from the local log and
 * prepended; stale fetch results are dropped.
 */
private void sendEntriesToPeer(final long peer, final long firstInList, final long lastIndexSent,
final long majority, final List<LogEntry> newLogEntries) {
// for each peer, figure out how many "back messages" should I send:
final long peerNextIdx = this.peersNextIndex.get(peer);
if (peerNextIdx < firstInList) {
final long moreCount = firstInList - peerNextIdx;
LOG.debug("{} sending {} more log entires to peer {}", myId, moreCount, peer);
// TODO check moreCount is reasonable, and available in log. Otherwise do alternative peer catch up
// TODO alternative peer catchup is by a different process, send message to that then skip sending AppendRpc
// TODO allow for smaller 'catch up' messages so we dont try to create a 400GB sized message.
// TODO cache these extra LogEntry objects so we dont recreate too many of them.
ListenableFuture<List<LogEntry>> peerEntriesFuture = log.getLogEntries(peerNextIdx, firstInList);
Futures.addCallback(peerEntriesFuture, new FutureCallback<List<LogEntry>>() {
@Override
public void onSuccess(List<LogEntry> entriesFromLog) {
// TODO make sure the lists splice neatly together.
assert entriesFromLog.size() == moreCount;
if (peerNextIdx != peersNextIndex.get(peer) ||
myState != State.LEADER) {
// Stale result: state moved on while the log fetch was in flight. A newer
// (or concurrent) run of consumeQueue has better information, so drop this.
return;
}
List<LogEntry> entriesToAppend = new ArrayList<>((int) (newLogEntries.size() + moreCount));
entriesToAppend.addAll(entriesFromLog);
entriesToAppend.addAll(newLogEntries);
sendAppendEntries(peer, peerNextIdx, lastIndexSent, majority, entriesToAppend);
}
@Override
public void onFailure(Throwable throwable) {
// Failed to retrieve from local log
// TODO is this situation ever recoverable?
failReplicatorInstance(throwable);
}
}, fiber);
} else {
sendAppendEntries(peer, peerNextIdx, lastIndexSent, majority, newLogEntries);
}
}
/**
 * Sends one AppendEntries RPC to a peer and handles the single reply.
 * prevLogIndex/prevLogTerm describe the entry immediately preceding the batch (the Raft
 * consistency check). On rejection the peer's nextIndex is moved back -- either to the
 * peer's reported last log entry or by one -- so the next tick resends older entries;
 * on success the ack is recorded and commit visibility recomputed. A reply timeout is
 * deliberately a no-op: the next tick retries anyway.
 */
@FiberOnly
private void sendAppendEntries(long peer, long peerNextIdx, long lastIndexSent, long majority,
final List<LogEntry> entries) {
assert (entries.size() == 0) || (entries.get(0).getIndex() == peerNextIdx);
assert (entries.size() == 0) || (entries.get(entries.size() - 1).getIndex() == lastIndexSent);
final long prevLogIndex = peerNextIdx - 1;
final long prevLogTerm;
if (prevLogIndex == 0) {
// Index 0 means "before the first entry": there is no term to look up.
prevLogTerm = 0;
} else {
prevLogTerm = log.getLogTerm(prevLogIndex);
}
// catch them up so the next RPC wont over-send old junk.
peersNextIndex.put(peer, lastIndexSent + 1);
AppendEntries msg = new AppendEntries(
currentTerm, myId, prevLogIndex, prevLogTerm,
entries,
lastCommittedIndex
);
RpcRequest request = new RpcRequest(peer, myId, quorumId, msg);
AsyncRequest.withOneReply(fiber, sendRpcChannel, request, message -> {
LOG.trace("{} got a reply {}", myId, message);
boolean wasSuccessful = message.getAppendReplyMessage().getSuccess();
if (!wasSuccessful) {
// This is per Page 7, paragraph 5. "After a rejection, the leader decrements nextIndex and retries"
if (message.getAppendReplyMessage().getMyLastLogEntry() != 0) {
peersNextIndex.put(peer, message.getAppendReplyMessage().getMyLastLogEntry())
;
} else {
peersNextIndex.put(peer, peerNextIdx - 1);
}
} else {
// we have been successfully acked up to this point.
LOG.trace("{} peer {} acked for {}", myId, peer, lastIndexSent);
peersLastAckedIndex.put(peer, lastIndexSent);
calculateLastVisible(majority, lastIndexSent);
}
}, 5, TimeUnit.SECONDS, () -> {
LOG.trace("{} peer {} timed out", myId, peer);
// Do nothing -> let next timeout handle things.
// This timeout exists just so that we can cancel and clean up stuff in jetlang.
});
}
/**
 * Recomputes the highest log index acknowledged by a majority of peers and, if it is
 * safe (>= myFirstIndexAsLeader -- a leader only declares entries from its own term
 * committed) and strictly newer, advances lastCommittedIndex.
 * NOTE(review): the histogram counts peers at each EXACT last-acked index; peers acked
 * past an index do not count toward that index's bucket, so acks are not cumulative.
 * Confirm this is the intended majority calculation.
 */
private void calculateLastVisible(long majority, long lastIndexSent) {
if (lastIndexSent == lastCommittedIndex) {
return; //skip null check basically
}
// Histogram: last-acked index -> number of peers currently at that index.
HashMap<Long, Integer> bucket = new HashMap<>();
for (long lastAcked : peersLastAckedIndex.values()) {
Integer p = bucket.get(lastAcked);
if (p == null) {
bucket.put(lastAcked, 1);
} else {
bucket.put(lastAcked, p + 1);
}
}
long mostAcked = 0;
for (Map.Entry<Long, Integer> e : bucket.entrySet()) {
if (e.getValue() >= majority) {
if (mostAcked != 0) {
LOG.warn("{} strange, found more than 1 'most acked' entry: {} and {}", myId, mostAcked, e.getKey());
}
mostAcked = e.getKey();
}
}
if (mostAcked == 0) {
return;
}
if (myFirstIndexAsLeader == 0) {
return; // cant declare new visible yet until we have a first index as the leader.
}
if (mostAcked < myFirstIndexAsLeader) {
LOG.warn("{} Found most-acked entry {} but my first index as leader was {}, cant declare visible yet", myId, mostAcked, myFirstIndexAsLeader);
return;
}
if (mostAcked < lastCommittedIndex) {
LOG.warn("{} weird mostAcked {} is smaller than lastCommittedIndex {}", myId, mostAcked, lastCommittedIndex);
return;
}
if (mostAcked == lastCommittedIndex) {
return;
}
setLastCommittedIndex(mostAcked);
LOG.trace("{} discovered new visible entry {}", myId, lastCommittedIndex);
// TODO take action and notify clients (pending new system frameworks)
}
/**
 * Monotonically advances lastCommittedIndex. Moving forward publishes a commit notice;
 * moving backward is refused with a warning; an equal value is a no-op.
 */
private void setLastCommittedIndex(long newLastCommittedIndex) {
  if (newLastCommittedIndex > lastCommittedIndex) {
    lastCommittedIndex = newLastCommittedIndex;
    notifyLastCommitted();
  } else if (newLastCommittedIndex < lastCommittedIndex) {
    LOG.warn("{} New lastCommittedIndex {} is smaller than previous lastCommittedIndex {}", myId, newLastCommittedIndex, lastCommittedIndex);
  }
}
/** Publishes the current lastCommittedIndex to commit-notice subscribers. */
private void notifyLastCommitted() {
commitNoticeChannel.publish(new ReplicationModule.IndexCommitNotice(this, lastCommittedIndex));
}
/**
 * Persists (currentTerm, votedFor) and then records the vote in memory.
 * NOTE(review): if persistence throws, failReplicatorInstance runs but the in-memory
 * votedFor is still assigned afterwards -- confirm that ordering is intended.
 */
private void setVotedFor(long votedFor) {
try {
persister.writeCurrentTermAndVotedFor(quorumId, currentTerm, votedFor);
} catch (IOException e) {
failReplicatorInstance(e);
}
this.votedFor = votedFor;
}
/**
 * Advances to newTerm, persisting (newTerm, votedFor = 0) first; entering a new term
 * always clears the recorded vote.
 * NOTE(review): as with setVotedFor, the in-memory fields are updated even if
 * persistence fails -- confirm that is intended.
 */
private void setCurrentTerm(long newTerm) {
try {
persister.writeCurrentTermAndVotedFor(quorumId, newTerm, 0);
} catch (IOException e) {
failReplicatorInstance(e);
}
this.currentTerm = newTerm;
this.votedFor = 0;
}
/** @return this replicator node's id. */
@Override
public long getId() {
return myId;
}
/** Shuts down this instance's fiber; no further scheduled callbacks will run. */
public void dispose() {
fiber.dispose();
}
/** @return true if this node currently believes itself to be the quorum leader. */
@Override
public boolean isLeader() {
return myState == State.LEADER;
}
/** Starts the instance's fiber, which drives all scheduled and queued work. */
@Override
public void start() {
LOG.debug("{} started {} with election timeout {}", myId, this.quorumId, this.myElectionTimeout);
fiber.start();
}
/** @return the channel on which leader/follower state transitions are published. */
@Override
public Channel<State> getStateChannel() {
return stateMemoryChannel;
}
}
|
Extract two submethods from consumeQueue() to reduce its length
|
c5db/src/main/java/c5db/replication/ReplicatorInstance.java
|
Extract two submethods from consumeQueue() to reduce its length
|
|
Java
|
apache-2.0
|
8cb5ee96dfd8bac336a4f0fd1be4787d0db3d513
| 0
|
Wilbeibi/cloudapp-mp2,Wilbeibi/cloudapp-mp2,Wilbeibi/cloudapp-mp2
|
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
/*
 * The rank of a page is the number of pages in the league with strictly less
 * (not equal) popularity than that page.
 *
 * So, first calculate the popularity of each page in the league. Then sort by
 * popularity and derive each page's rank from its position in the sorted order.
 */
public class PopularityLeague extends Configured implements Tool {
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new PopularityLeague(), args);
System.exit(res);
}
public static class IntArrayWritable extends ArrayWritable {
public IntArrayWritable() {
super(IntWritable.class);
}
public IntArrayWritable(Integer[] numbers) {
super(IntWritable.class);
IntWritable[] ints = new IntWritable[numbers.length];
for (int i = 0; i < numbers.length; i++) {
ints[i] = new IntWritable(numbers[i]);
}
set(ints);
}
}
@Override
public int run(String[] args) throws Exception {
// TODO
Configuration conf = this.getConf();
FileSystem fs = FileSystem.get(conf);
Path tmpPath = new Path("/mp2/tmp");
fs.delete(tmpPath, true);
Job jobA = Job.getInstance(conf, "Popularity Count");
jobA.setOutputKeyClass(IntWritable.class);
jobA.setOutputValueClass(IntWritable.class);
jobA.setMapOutputKeyClass(IntWritable.class);
jobA.setMapOutputValueClass(IntWritable.class);
jobA.setMapperClass(LinkCountMap.class);
jobA.setReducerClass(LinkCountReduce.class);
FileInputFormat.setInputPaths(jobA, new Path(args[0]));
FileOutputFormat.setOutputPath(jobA, tmpPath);
jobA.setJarByClass(PopularityLeague.class);
jobA.waitForCompletion(true);
Job jobB = Job.getInstance(conf, "Popularity League");
jobB.setOutputKeyClass(IntWritable.class);
jobB.setOutputValueClass(IntWritable.class);
jobB.setMapOutputKeyClass(NullWritable.class);
jobB.setMapOutputValueClass(IntArrayWritable.class);
jobB.setMapperClass(LeagueLinksMapper.class);
jobB.setReducerClass(LeagueLinksReducer.class);
FileInputFormat.setInputPaths(jobB, tmpPath);
FileOutputFormat.setOutputPath(jobB, new Path(args[1]));
jobB.setInputFormatClass(KeyValueTextInputFormat.class);
jobB.setOutputFormatClass(TextOutputFormat.class);
jobB.setJarByClass(PopularityLeague.class);
return jobB.waitForCompletion(true) ? 0 : 1;
}
public static String readHDFSFile(String path, Configuration conf) throws IOException{
Path pt=new Path(path);
FileSystem fs = FileSystem.get(pt.toUri(), conf);
FSDataInputStream file = fs.open(pt);
BufferedReader buffIn=new BufferedReader(new InputStreamReader(file));
StringBuilder everything = new StringBuilder();
String line;
while( (line = buffIn.readLine()) != null) {
everything.append(line);
everything.append("\n");
}
return everything.toString();
}
// TODO
public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> {
Set<String> leagueSet;
@Override
protected void setup(Context context) throws IOException,InterruptedException {
Configuration conf = context.getConfiguration();
String league_file = conf.get("league");
this.leagueSet = new HashSet<String>(Arrays.asList(readHDFSFile(league_file, conf).split("\n")));
}
@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
String line = value.toString();
String delimiters = " :";
StringTokenizer tokenizer = new StringTokenizer(line, delimiters);
if (tokenizer.hasMoreTokens()) {
// ignore the from
tokenizer.nextToken();
}
while (tokenizer.hasMoreElements()) {
Integer to = Integer.parseInt(tokenizer.nextToken().trim());
if (this.leagueSet.contains(to)) {
context.write(new IntWritable(to), new IntWritable(1));
}
}
}
}
public static class LinkCountReduce extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
@Override
public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
int count = 0;
for (IntWritable val: values) {
count += val.get();
}
context.write(key, new IntWritable(count));
}
}
public static class LeagueLinksMapper extends Mapper<Text, Text, NullWritable, IntArrayWritable> {
@Override
public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
Integer[] kv = {Integer.parseInt(key.toString()), Integer.parseInt(value.toString())};
IntArrayWritable val = new IntArrayWritable(kv);
context.write(NullWritable.get(), val);
}
}
public static class LeagueLinksReducer extends Reducer<NullWritable, IntArrayWritable, IntWritable, IntWritable> {
private ArrayList<Pair<Integer, Integer>> league = new ArrayList<Pair<Integer, Integer>>();
@Override
public void reduce(NullWritable key, Iterable<IntArrayWritable> values, Context context) throws IOException, InterruptedException {
for (IntArrayWritable val: values) {
IntWritable[] pair = (IntWritable[]) val.toArray();
Integer link = pair[0].get();
Integer count = pair[1].get();
league.add(new Pair(link, count));
}
if (league.size() == 0) {
return;
}
Collections.sort(league, new Comparator<Pair<Integer, Integer>>() {
@Override
public int compare(Pair<Integer, Integer> a, Pair<Integer, Integer> b) {
return a.second.compareTo(b.second);
}
});
int rank = 0, prev_popularity = league.get(0).second;
context.write(new IntWritable(league.get(0).first), new IntWritable(rank));
for (int i = 1; i < league.size(); i++) {
Pair<Integer, Integer> curr = league.get(i);
if (curr.second > prev_popularity) {
rank = i;
prev_popularity = curr.second;
}
context.write(new IntWritable(curr.first), new IntWritable(rank));
}
}
}
class Pair<A extends Comparable<? super A>,
B extends Comparable<? super B>>
implements Comparable<Pair<A, B>> {
public final A first;
public final B second;
public Pair(A first, B second) {
this.first = first;
this.second = second;
}
public static <A extends Comparable<? super A>,
B extends Comparable<? super B>>
Pair<A, B> of(A first, B second) {
return new Pair<A, B>(first, second);
}
@Override
public int compareTo(Pair<A, B> o) {
int cmp = o == null ? 1 : (this.first).compareTo(o.first);
return cmp == 0 ? (this.second).compareTo(o.second) : cmp;
}
@Override
public int hashCode() {
return 31 * hashcode(first) + hashcode(second);
}
private static int hashcode(Object o) {
return o == null ? 0 : o.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Pair))
return false;
if (this == obj)
return true;
return equal(first, ((Pair<?, ?>) obj).first)
&& equal(second, ((Pair<?, ?>) obj).second);
}
private boolean equal(Object o1, Object o2) {
return o1 == o2 || (o1 != null && o1.equals(o2));
}
@Override
public String toString() {
return "(" + first + ", " + second + ')';
}
}
}
|
PopularityLeague.java
|
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;
/*
 * The rank of a page is the number of pages in the league with strictly less
 * (not equal) popularity than that page.
 *
 * So, first calculate the popularity of each page in the league. Then sort by
 * popularity and derive each page's rank from its position in the sorted order.
 */
public class PopularityLeague extends Configured implements Tool {
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new PopularityLeague(), args);
System.exit(res);
}
public static class IntArrayWritable extends ArrayWritable {
public IntArrayWritable() {
super(IntWritable.class);
}
public IntArrayWritable(Integer[] numbers) {
super(IntWritable.class);
IntWritable[] ints = new IntWritable[numbers.length];
for (int i = 0; i < numbers.length; i++) {
ints[i] = new IntWritable(numbers[i]);
}
set(ints);
}
}
@Override
public int run(String[] args) throws Exception {
// TODO
Configuration conf = this.getConf();
FileSystem fs = FileSystem.get(conf);
Path tmpPath = new Path("/mp2/tmp");
fs.delete(tmpPath, true);
Job jobA = Job.getInstance(conf, "Popularity Count");
jobA.setOutputKeyClass(IntWritable.class);
jobA.setOutputValueClass(IntWritable.class);
jobA.setMapOutputKeyClass(IntWritable.class);
jobA.setMapOutputValueClass(IntWritable.class);
jobA.setMapperClass(LinkCountMap.class);
jobA.setReducerClass(LinkCountReduce.class);
FileInputFormat.setInputPaths(jobA, new Path(args[0]));
FileOutputFormat.setOutputPath(jobA, tmpPath);
jobA.setJarByClass(PopularityLeague.class);
jobA.waitForCompletion(true);
Job jobB = Job.getInstance(conf, "Popularity League");
jobB.setOutputKeyClass(IntWritable.class);
jobB.setOutputValueClass(IntWritable.class);
jobB.setMapOutputKeyClass(NullWritable.class);
jobB.setMapOutputValueClass(IntArrayWritable.class);
jobB.setMapperClass(LeagueLinksMapper.class);
jobB.setReducerClass(LeagueLinksReducer.class);
FileInputFormat.setInputPaths(jobB, tmpPath);
FileOutputFormat.setOutputPath(jobB, new Path(args[1]));
jobB.setInputFormatClass(KeyValueTextInputFormat.class);
jobB.setOutputFormatClass(TextOutputFormat.class);
jobB.setJarByClass(PopularityLeague.class);
return jobB.waitForCompletion(true) ? 0 : 1;
}
public static String readHDFSFile(String path, Configuration conf) throws IOException{
Path pt=new Path(path);
FileSystem fs = FileSystem.get(pt.toUri(), conf);
FSDataInputStream file = fs.open(pt);
BufferedReader buffIn=new BufferedReader(new InputStreamReader(file));
StringBuilder everything = new StringBuilder();
String line;
while( (line = buffIn.readLine()) != null) {
everything.append(line);
everything.append("\n");
}
return everything.toString();
}
// TODO
public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> {
Set<String> leagueSet;
@Override
protected void setup(Context context) throws IOException,InterruptedException {
Configuration conf = context.getConfiguration();
String league_file = conf.get("league");
this.leagueSet = new HashSet<String>(Arrays.asList(readHDFSFile(league_file, conf).split("\n")));
}
@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
String line = value.toString();
String delimiters = " :";
StringTokenizer tokenizer = new StringTokenizer(line, delimiters);
if (tokenizer.hasMoreTokens()) {
// ignore the from
tokenizer.nextToken();
}
while (tokenizer.hasMoreElements()) {
Integer to = Integer.parseInt(tokenizer.nextToken().trim());
if (this.leagueSet.contains(to)) {
context.write(new IntWritable(to), new IntWritable(1));
}
}
}
}
public static class LinkCountReduce extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
@Override
public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
int count = 0;
for (IntWritable val: values) {
count += val.get();
}
context.write(key, new IntWritable(count));
}
}
public static class LeagueLinksMapper extends Mapper<Text, Text, NullWritable, IntArrayWritable> {
@Override
public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
Integer[] kv = {Integer.parseInt(key.toString()), Integer.parseInt(value.toString())};
IntArrayWritable val = new IntArrayWritable(kv);
context.write(NullWritable.get(), val);
}
}
public static class LeagueLinksReducer extends Reducer<NullWritable, IntArrayWritable, IntWritable, IntWritable> {
private ArrayList<Pair<Integer, Integer>> league = new ArrayList<Pair<Integer, Integer>>();
@Override
public void reduce(NullWritable key, Iterable<IntArrayWritable> values, Context context) throws IOException, InterruptedException {
for (IntArrayWritable val: values) {
IntWritable[] pair = (IntWritable[]) val.toArray();
Integer link = pair[0].get();
Integer count = pair[1].get();
league.add(new Pair<Integer, Integer>(link, count));
}
if (league.size() == 0) {
return;
}
Collections.sort(league, new Comparator<Pair<Integer, Integer>>() {
@Override
public int compare(Pair<Integer, Integer> a, Pair<Integer, Integer> b) {
return a.second.compareTo(b.second);
}
});
int rank = 0, prev_popularity = league.get(0).second;
context.write(new IntWritable(league.get(0).first), new IntWritable(rank));
for (int i = 1; i < league.size(); i++) {
Pair<Integer, Integer> curr = league.get(i);
if (curr.second > prev_popularity) {
rank = i;
prev_popularity = curr.second;
}
context.write(new IntWritable(curr.first), new IntWritable(rank));
}
}
}
class Pair<A extends Comparable<? super A>,
B extends Comparable<? super B>>
implements Comparable<Pair<A, B>> {
public final A first;
public final B second;
public Pair(A first, B second) {
this.first = first;
this.second = second;
}
public static <A extends Comparable<? super A>,
B extends Comparable<? super B>>
Pair<A, B> of(A first, B second) {
return new Pair<A, B>(first, second);
}
@Override
public int compareTo(Pair<A, B> o) {
int cmp = o == null ? 1 : (this.first).compareTo(o.first);
return cmp == 0 ? (this.second).compareTo(o.second) : cmp;
}
@Override
public int hashCode() {
return 31 * hashcode(first) + hashcode(second);
}
private static int hashcode(Object o) {
return o == null ? 0 : o.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof Pair))
return false;
if (this == obj)
return true;
return equal(first, ((Pair<?, ?>) obj).first)
&& equal(second, ((Pair<?, ?>) obj).second);
}
private boolean equal(Object o1, Object o2) {
return o1 == o2 || (o1 != null && o1.equals(o2));
}
@Override
public String toString() {
return "(" + first + ", " + second + ')';
}
}
}
|
:kissing_closed_eyes:
|
PopularityLeague.java
|
:kissing_closed_eyes:
|
|
Java
|
apache-2.0
|
67a0c0058020be045f40c78222d489b6eb0c2852
| 0
|
gauravlall/oneops,lkhusid/oneops,okornev/oneops,okornev/oneops,lkhusid/oneops,okornev/oneops,gauravlall/oneops,lkhusid/oneops,gauravlall/oneops,gauravlall/oneops,lkhusid/oneops,lkhusid/oneops,lkhusid/oneops,okornev/oneops,gauravlall/oneops,okornev/oneops,gauravlall/oneops,okornev/oneops
|
/*******************************************************************************
*
* Copyright 2015 Walmart, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package com.oneops.transistor.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.oneops.cms.cm.domain.CmsCI;
import com.oneops.cms.cm.domain.CmsCIAttribute;
import com.oneops.cms.cm.domain.CmsCIRelation;
import com.oneops.cms.cm.domain.CmsCIRelationAttribute;
import com.oneops.cms.cm.domain.CmsLink;
import com.oneops.cms.cm.service.CmsCmProcessor;
import com.oneops.cms.dj.domain.CmsRfcAttribute;
import com.oneops.cms.dj.domain.CmsRfcCI;
import com.oneops.cms.dj.domain.CmsRfcRelation;
import com.oneops.cms.dj.service.CmsCmRfcMrgProcessor;
import com.oneops.cms.dj.service.CmsRfcProcessor;
import com.oneops.cms.exceptions.DJException;
import com.oneops.cms.md.domain.CmsClazz;
import com.oneops.cms.md.domain.CmsClazzAttribute;
import com.oneops.cms.md.domain.CmsRelation;
import com.oneops.cms.md.domain.CmsRelationAttribute;
import com.oneops.cms.md.service.CmsMdProcessor;
import com.oneops.cms.util.CIValidationResult;
import com.oneops.cms.util.CmsDJValidator;
import com.oneops.cms.util.CmsError;
import com.oneops.cms.util.CmsUtil;
import com.oneops.transistor.exceptions.TransistorException;
public class BomRfcBulkProcessor {
// Class-level logger shared by all processing methods.
static Logger logger = Logger.getLogger(BomRfcBulkProcessor.class);
// Short class name -> fixed (early) exec order; components listed here deploy
// before everything else. Lower number = earlier.
private static final Map<String, Integer> priorityMap = new HashMap<String, Integer>();
static {
    //priorityMap.put("Compute", 2);
    //priorityMap.put("Storage", 2);
    priorityMap.put("Keypair", 1);
}
// Highest exec order reserved for the prioritized classes above.
private static final int priorityMax = 1;
// Relation names used when wiring bom-level CIs.
private static final String BOM_CLOUD_RELATION_NAME = "base.DeployedTo";
private static final String BOM_REALIZED_RELATION_NAME = "base.RealizedAs";
private static final String BOM_DEPENDS_ON_RELATION_NAME = "bom.DependsOn";
private static final String BOM_MANAGED_VIA_RELATION_NAME = "bom.ManagedVia";
// Safety limits for graph expansion, overridable via JVM system properties.
private static final int MAX_RECUSION_DEPTH = Integer.valueOf(System.getProperty("com.oneops.transistor.MaxRecursion", "50"));
private static final int MAX_NUM_OF_EDGES = Integer.valueOf(System.getProperty("com.oneops.transistor.MaxEdges", "100000"));
private static final String CONVERGE_RELATION_ATTRIBUTE = "converge";
// Collaborators injected via the setters below.
private CmsCmProcessor cmProcessor;
private CmsMdProcessor mdProcessor;
private CmsRfcProcessor rfcProcessor;
private CmsCmRfcMrgProcessor cmRfcMrgProcessor;
private CmsDJValidator djValidator;
private Gson gson = new Gson();
//private CmsNsManager nsManager;
private TransUtil trUtil;
private CmsUtil cmsUtil;
/** Injects the shared CMS utility (used here for local-variable resolution). */
public void setCmsUtil(CmsUtil cmsUtil) {
    this.cmsUtil = cmsUtil;
}
/** Injects the transistor helper (namespaces, class-name munging, CI cloning). */
public void setTrUtil(TransUtil trUtil) {
    this.trUtil = trUtil;
}
/** Injects the CM processor used to read existing CIs and relations. */
public void setCmProcessor(CmsCmProcessor cmProcessor) {
    this.cmProcessor = cmProcessor;
}
/** Injects the metadata processor. */
public void setMdProcessor(CmsMdProcessor mdProcessor) {
    this.mdProcessor = mdProcessor;
}
/** Injects the rfc processor used to persist ci and relation rfcs. */
public void setRfcProcessor(CmsRfcProcessor rfcProcessor) {
    this.rfcProcessor = rfcProcessor;
}
/** Injects the merge processor that combines CM state with open rfcs. */
public void setCmRfcMrgProcessor(CmsCmRfcMrgProcessor cmRfcMrgProcessor) {
    this.cmRfcMrgProcessor = cmRfcMrgProcessor;
}
/** Injects the DJ validator. */
public void setDjValidator(CmsDJValidator djValidator) {
    this.djValidator = djValidator;
}
/**
 * Convenience overload that delegates to the full
 * {@code processManifestPlatform} with {@code createPlatNs = false}
 * (no platform-level namespace is created).
 */
public int processManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, Map<String,String> globalVars, Map<String,String> cloudVars,String userId, boolean usePercent){
    return processManifestPlatform(platformCi, bindingRel, nsPath, startExecOrder, globalVars, cloudVars, userId, false, usePercent);
}
/**
 * Expands a single manifest platform into bom-level rfcs for the cloud
 * referenced by {@code bindingRel}: bootstraps the bom node graph, resolves
 * variables, creates ci/relation rfcs, then processes managed-via,
 * secured-by and entry-point relations, and finally schedules deletes for
 * obsolete bom CIs (unless a percent-scoped partial deployment).
 *
 * @return the highest exec order consumed, so callers can chain platforms
 */
public int processManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, Map<String,String> globalVars, Map<String,String> cloudVars, String userId, boolean createPlatNs, boolean usePercent){
    // orders <= priorityMax are reserved for prioritized classes (see priorityMap)
    if (startExecOrder <= priorityMax) startExecOrder = priorityMax+1;
    long startingTime = System.currentTimeMillis();
    int maxExecOrder = 0;
    if (createPlatNs) {
        // platform ns: <base>/<platform>[/<major_version>]; Iaas platforms are unversioned
        if (platformCi.getCiClassName().equals("manifest.Iaas")) {
            nsPath = nsPath + "/" + platformCi.getCiName();
        } else {
            nsPath = nsPath + "/" + platformCi.getCiName() + "/" + platformCi.getAttribute("major_version").getDjValue();
        }
        trUtil.verifyAndCreateNS(nsPath);
    }
    logger.info(nsPath + " >>> Start working on " + platformCi.getCiName() + ", cloud - " + bindingRel.getToCi().getCiName());
    Map<String,String> localVars = cmsUtil.getLocalVars(platformCi);
    // all manifest components the platform "Requires"
    List<CmsCIRelation> mfstPlatComponents = cmProcessor.getFromCIRelations(platformCi.getCiId(), null, "Requires", null);
    if (mfstPlatComponents.size() > 0) {
        String manifestNs = mfstPlatComponents.get(0).getNsPath();
        boolean isPartial = isPartialDeployment(manifestNs);
        // expand every manifest component into bom nodes, restarting from any
        // component not yet reached from previous starting points
        List<BomRfc> boms = new ArrayList<BomRfc>();
        Map<String, List<String>> mfstId2nodeId = new HashMap<String,List<String>>();
        CmsCI startingPoint = mfstPlatComponents.get(0).getToCi();
        Map<String, Integer> namesMap = new HashMap<String, Integer>();
        Map<Long,Map<String,List<CmsCIRelation>>> manifestDependsOnRels = new HashMap<Long,Map<String,List<CmsCIRelation>>>();
        while (startingPoint != null) {
            BomRfc newBom = bootstrapNewBom(startingPoint, namesMap, bindingRel.getToCiId(), 1);
            boms.add(newBom);
            mfstId2nodeId.put(String.valueOf(newBom.manifestCiId) + "-" + 1, new ArrayList<String>(Arrays.asList(newBom.nodeId)));
            boms.addAll(processNode(newBom, namesMap, bindingRel, mfstId2nodeId, manifestDependsOnRels, 1, usePercent, 1));
            startingPoint = getStartingPoint(mfstPlatComponents, boms);
        }
        // this is needed to work around ibatis
        // if there is no any updates within current transaction
        // ibatis would not return a new object as query result but instead a ref to the previousely created one
        // if it was modified outside - the changes will not be reset
        for(BomRfc bom : boms) {
            bom.mfstCi = trUtil.cloneCI(bom.mfstCi);
        }
        //process vars
        processVars(boms, cloudVars, globalVars, localVars);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", starting creating rfcs");
        long bomCreationStartTime = System.currentTimeMillis();
        Long releaseId = null;
        // snapshot of existing bom state so rfc creation can diff against it
        ExistingRels existingRels = new ExistingRels(nsPath);
        Map<String, CmsCI> existingCIs = getExistingCis(bindingRel.getToCiId(), nsPath);
        Map<String, CmsRfcCI> existingRFCs = getOpenRFCs(nsPath);
        maxExecOrder = createBomRfcsAndRels(boms, nsPath, bindingRel, startExecOrder, isPartial, userId, existingRels, existingCIs, existingRFCs, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with main RFCs and relations, time spent - " + (System.currentTimeMillis() - bomCreationStartTime));
        Map<Long, List<BomRfc>> bomsMap = buildMfstToBomRfcMap(boms);
        long mngviaStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing managed via");
        processManagedViaRels(mfstPlatComponents,bomsMap,nsPath, userId, existingRels, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with managed via, time spent - " + (System.currentTimeMillis() - mngviaStartTime));
        long secByStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing secured by");
        processSecuredByRels(mfstPlatComponents,bomsMap,nsPath, userId, existingRels, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with secured by, time spent - " + (System.currentTimeMillis() - secByStartTime));
        long entryPointStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing entry point");
        processEntryPointRel(platformCi.getCiId(),bomsMap, nsPath, userId, existingRels);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with entry point, time spent - " + (System.currentTimeMillis() - entryPointStartTime));
        // obsolete-ci cleanup is skipped for percent-scoped partial deployments
        if (!usePercent || !isPartial) {
            if (maxExecOrder == 0) maxExecOrder++;
            long obsoleteStartTime = System.currentTimeMillis();
            logger.info(nsPath + " >>> " + platformCi.getCiName() + ", finding obsolete boms");
            maxExecOrder = findObsolete(boms, bindingRel, nsPath, maxExecOrder, existingCIs, userId, false);
            logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with obsolete boms, time spent - " + (System.currentTimeMillis() - obsoleteStartTime));
        }
        if (logger.isDebugEnabled()) {
            for(BomRfc bom : boms) {
                logger.debug(bom.ciName + "::" + bom.execOrder);
            }
        }
        //help gc a little bit
        existingRels = null;
        existingCIs = null;
        existingRFCs = null;
    }
    long timeTook = System.currentTimeMillis() - startingTime;
    logger.info(nsPath + ">>> Done with " + platformCi.getCiName() + ", cloud - " + bindingRel.getToCi().getCiName() + ", Time to process - " + timeTook + " ms.");
    return maxExecOrder;
}
/**
 * Returns true when any manifest.DependsOn relation in the given namespace
 * carries a pct_dpmt attribute other than "100", meaning only a percentage
 * of the dependent components is to be deployed.
 */
private boolean isPartialDeployment(String manifestNs) {
    for (CmsCIRelation dependsOn : cmProcessor.getCIRelationsNaked(manifestNs, "manifest.DependsOn", null, null, null)) {
        CmsCIRelationAttribute pct = dependsOn.getAttribute("pct_dpmt");
        if (pct != null && !"100".equals(pct.getDjValue())) {
            return true;
        }
    }
    return false;
}
/**
 * Picks the next manifest component that has not yet been expanded into bom
 * nodes, or null when every component is already covered.
 */
private CmsCI getStartingPoint(List<CmsCIRelation> mfstPlatComponents, List<BomRfc> boms) {
    Set<Long> coveredManifestIds = new HashSet<Long>();
    for (BomRfc bom : boms) {
        coveredManifestIds.add(bom.manifestCiId);
    }
    for (CmsCIRelation rel : mfstPlatComponents) {
        if (!coveredManifestIds.contains(rel.getToCiId())) {
            return rel.getToCi();
        }
    }
    return null;
}
/** Resolves cloud/global/local variable references on every manifest CI backing the boms. */
private void processVars(List<BomRfc> boms, Map<String,String> cloudVars, Map<String,String> globalVars, Map<String,String> localVars) {
    for (int i = 0; i < boms.size(); i++) {
        trUtil.processAllVars(boms.get(i).mfstCi, cloudVars, globalVars, localVars);
    }
}
/**
 * Finds deployed bom CIs that are absent from the newly computed desired
 * state and schedules delete rfcs for them (via processObsolete).
 * Returns the resulting max exec order.
 */
private int findObsolete(List<BomRfc> newBoms, CmsCIRelation bindingRel, String nsPath, int startingExecOrder, Map<String, CmsCI> existingCIs,String userId, boolean global) {
    logger.info(nsPath + " >>> finding cis to delete...");
    long startTime = System.currentTimeMillis();
    int maxExecOrder = startingExecOrder;
    // ci names present in the new desired state
    Set<String> desiredCiNames = new HashSet<String>();
    for (BomRfc bom : newBoms) {
        desiredCiNames.add(bom.ciName);
    }
    // anything currently deployed but not desired is obsolete
    Map<Long, CmsCI> obsoleteCisMap = new HashMap<Long, CmsCI>();
    for (CmsCI ci : existingCIs.values()) {
        if (!desiredCiNames.contains(ci.getCiName())) {
            logger.info("This ci should be deleted - " + ci.getCiName());
            obsoleteCisMap.put(ci.getCiId(), ci);
        }
    }
    logger.info(nsPath + " >>> creating delete rfcs and traversing strong relations...");
    if (!obsoleteCisMap.isEmpty()) {
        maxExecOrder = processObsolete(newBoms, obsoleteCisMap, startingExecOrder, nsPath, userId, global);
    }
    logger.info(nsPath + " >>> Done creating delete rfcs, time taken:" + (System.currentTimeMillis() - startTime));
    return maxExecOrder;
}
/**
 * Creates delete rfcs for obsolete bom CIs in dependency order (a ci is
 * deleted before the cis it depends on) and issues dummy-update rfcs for
 * still-live cis that depended on a deleted one, propagating those updates
 * per the manifest propagate_to rules.
 *
 * Fix: the propagation-time metric previously read
 * {@code totalPropagationTime += totalPropagationTime + (endTime - startTime)},
 * doubling the running total on every iteration; it now accumulates only the
 * elapsed delta. (Metric/logging only — rfc creation is unaffected.)
 *
 * @return the max exec order consumed by the delete/dummy-update rfcs
 */
private int processObsolete(List<BomRfc> bomRfcs, Map<Long, CmsCI> obsoleteCisMap, int startingExecOrder, String nsPath, String userId, boolean global){
    int maxExecOrder = startingExecOrder;
    // obsolete cis that something obsolete depends on (must be deleted later)
    Set<Long> obsoleteToRelations = new HashSet<Long>();
    // obsolete ci -> its outgoing DependsOn links into other obsolete cis
    Map<Long, List<CmsLink>> obsoleteFromRelations = new HashMap<Long, List<CmsLink>>();
    // links from live cis into obsolete ones -> those live cis need a dummy update
    List<CmsLink> dummyUpdateRels = new ArrayList<CmsLink>();
    List<CmsLink> dependsOnLinks = cmProcessor.getLinks(nsPath, "bom.DependsOn");
    //convert to map keyed by the target ci id
    Map<Long, List<CmsLink>> toCiDependsOnMap = new HashMap<Long, List<CmsLink>>();
    for (CmsLink link : dependsOnLinks) {
        if (!toCiDependsOnMap.containsKey(link.getToCiId())) {
            toCiDependsOnMap.put(link.getToCiId(), new ArrayList<CmsLink>());
        }
        toCiDependsOnMap.get(link.getToCiId()).add(link);
    }
    for (Long ciId : obsoleteCisMap.keySet()) {
        if (toCiDependsOnMap.containsKey(ciId)) {
            for (CmsLink fromDependsOnCiIdLink : toCiDependsOnMap.get(ciId)) {
                if (obsoleteCisMap.containsKey(fromDependsOnCiIdLink.getFromCiId())) {
                    // obsolete -> obsolete edge: drives the delete ordering below
                    obsoleteToRelations.add(ciId);
                    if (!obsoleteFromRelations.containsKey(fromDependsOnCiIdLink.getFromCiId())) {
                        obsoleteFromRelations.put(fromDependsOnCiIdLink.getFromCiId(), new ArrayList<CmsLink>());
                    }
                    obsoleteFromRelations.get(fromDependsOnCiIdLink.getFromCiId()).add(fromDependsOnCiIdLink);
                } else {
                    // live ci depends on an obsolete one: schedule a dummy update
                    dummyUpdateRels.add(fromDependsOnCiIdLink);
                }
            }
        }
    }
    // assign exec orders starting from the roots (obsolete cis nothing
    // obsolete depends on) and walking the obsolete dependency graph
    Map<Long, Integer> execOrder = new HashMap<Long, Integer>();
    for (Long ciId : obsoleteCisMap.keySet()) {
        if (!obsoleteToRelations.contains(ciId)) {
            execOrder.put(ciId, startingExecOrder);
            processObsoleteOrder(ciId, execOrder, obsoleteFromRelations);
        }
    }
    for (Long ciId : execOrder.keySet()) {
        int ciExecOrder = execOrder.get(ciId);
        CmsCI ci = obsoleteCisMap.get(ciId);
        String shortClazzName = trUtil.getShortClazzName(ci.getCiClassName());
        int actualExecOrder = ciExecOrder;
        if (priorityMap.containsKey(shortClazzName)) {
            // prioritized classes are deleted last, after all regular deletes
            int priorityOrder = priorityMap.get(shortClazzName);
            actualExecOrder = startingExecOrder + obsoleteCisMap.size() + priorityMax - priorityOrder + 1;
        }
        createDeleteRfc(ci, actualExecOrder, userId);
        maxExecOrder = (ciExecOrder > maxExecOrder) ? ciExecOrder : maxExecOrder;
    }
    Map<Long, List<String>> manifestPropagations = new HashMap<Long, List<String>>();
    Set<Long> propagations = new HashSet<Long>();
    long totalPropagationTime = 0;
    //now lets submit dummy updates for live cis that depended on deleted ones
    Set<Long> dummyUpdates = new HashSet<Long>();
    if (dummyUpdateRels.size() > 0) {
        for (CmsLink rel : dummyUpdateRels) {
            dummyUpdates.add(rel.getFromCiId());
            for (BomRfc bomRfc : bomRfcs) {
                if (bomRfc.rfc == null) {
                    logger.info("bom.rfc null for " + bomRfc.ciName + " nspath: " + nsPath);
                } else if (bomRfc.rfc.getCiId() == rel.getFromCiId()) {
                    long startTime = System.currentTimeMillis();
                    mapPropagations(bomRfc.manifestCiId, manifestPropagations);
                    if (manifestPropagations.get(bomRfc.manifestCiId).size() != 0) {
                        propagateUpdate(bomRfc.rfc.getCiId(), bomRfc.manifestCiId, manifestPropagations, userId, propagations);
                    }
                    long endTime = System.currentTimeMillis();
                    // accumulate only the elapsed delta (previously the running
                    // total was re-added each time, doubling the metric)
                    totalPropagationTime += (endTime - startTime);
                }
            }
        }
    }
    dummyUpdates.addAll(propagations);
    maxExecOrder = processDummyUpdates(dummyUpdates, bomRfcs, maxExecOrder);
    logger.info(nsPath + " >>> Total time taken by propagation in seconds: " + totalPropagationTime/1000.0);
    return maxExecOrder;
}
/**
 * Issues dummy-update rfcs for the given ci ids, batched by the exec order
 * of their bom rfcs: batches are processed in ascending exec order and each
 * batch gets the next maxExecOrder slot.
 *
 * Fix: iterates the sorted map via entrySet and uses the key directly
 * instead of allocating a deprecated {@code new Integer(order)} box for a
 * second map lookup.
 *
 * @return the new max exec order after all dummy-update batches
 */
private int processDummyUpdates(Set<Long> dummyUpdates,
        List<BomRfc> bomRfcs, int maxExecOrder) {
    if (dummyUpdates.size() > 0) {
        // bom exec order -> ci ids needing a dummy update at that point
        TreeMap<Integer, List<Long>> dummyUpdateExecOrders = new TreeMap<Integer, List<Long>>();
        for (BomRfc bom : bomRfcs) {
            if (bom.rfc == null) {
                logger.info("rfc null for: " + bom.ciName);
                continue;
            }
            if (dummyUpdates.contains(bom.rfc.getCiId())) {
                List<Long> ciIds = dummyUpdateExecOrders.get(bom.execOrder);
                if (ciIds == null) {
                    ciIds = new ArrayList<Long>();
                    dummyUpdateExecOrders.put(bom.execOrder, ciIds);
                }
                ciIds.add(bom.rfc.getCiId());
            }
        }
        // walk batches in ascending exec order, bumping maxExecOrder per batch
        for (Map.Entry<Integer, List<Long>> entry : dummyUpdateExecOrders.entrySet()) {
            maxExecOrder++;
            for (long dummyUpdateCiId : entry.getValue()) {
                cmRfcMrgProcessor.createDummyUpdateRfc(dummyUpdateCiId, null, maxExecOrder, "oneops-transistor");
            }
        }
    }
    return maxExecOrder;
}
/**
 * Builds and persists a "delete" rfc for the given ci at the specified exec
 * order, carrying over the ci's identity and namespace fields.
 */
private void createDeleteRfc(CmsCI ci, int execOrder, String userId)
{
    CmsRfcCI deleteRfc = new CmsRfcCI();
    deleteRfc.setRfcAction("delete");
    deleteRfc.setComments("deleting");
    deleteRfc.setExecOrder(execOrder);
    // ci identity
    deleteRfc.setCiId(ci.getCiId());
    deleteRfc.setCiClassId(ci.getCiClassId());
    deleteRfc.setCiClassName(ci.getCiClassName());
    deleteRfc.setCiGoid(ci.getCiGoid());
    deleteRfc.setCiName(ci.getCiName());
    // namespace
    deleteRfc.setNsId(ci.getNsId());
    deleteRfc.setNsPath(ci.getNsPath());
    // audit fields
    deleteRfc.setCreatedBy(userId);
    deleteRfc.setUpdatedBy(userId);
    rfcProcessor.createRfcCI(deleteRfc, userId);
}
/**
 * Depth-first walk over the obsolete dependency graph assigning each ci an
 * exec order one greater than its predecessor; when a ci is reachable via
 * several paths the larger order wins.
 * NOTE(review): recursion is unbounded — assumes the DependsOn graph among
 * obsolete cis is acyclic (same assumption as the original).
 */
private void processObsoleteOrder(long startingCiId, Map<Long, Integer> execOrder, Map<Long, List<CmsLink>> obsoleteRelations) {
    List<CmsLink> outgoing = obsoleteRelations.get(startingCiId);
    if (outgoing == null) {
        return;
    }
    int nextExecOrder = execOrder.get(startingCiId) + 1;
    for (CmsLink link : outgoing) {
        long nextCiId = link.getToCiId();
        Integer current = execOrder.get(nextCiId);
        if (current == null || nextExecOrder > current) {
            execOrder.put(nextCiId, nextExecOrder);
        }
        processObsoleteOrder(nextCiId, execOrder, obsoleteRelations);
    }
}
/**
 * Persists the bom graph: creates/refreshes the ci rfcs in exec-order
 * buckets, wires bom.DependsOn relation rfcs, issues dummy updates for
 * dependency propagation, and (on full deployments) removes DependsOn
 * relations no longer in the desired state.
 *
 * NOTE(review): behavior depends heavily on statement order — rfc ids are
 * produced as side effects of createBomRfc/createBomRelationRfc and read
 * immediately after — so only comments were added here.
 *
 * @return the max exec order consumed
 */
private int createBomRfcsAndRels(List<BomRfc> boms,
        String nsPath,
        CmsCIRelation bindingRel,
        int startExecOrder,
        boolean isPartial,
        String userId,
        ExistingRels existingRels,
        Map<String, CmsCI> existingCIs,
        Map<String, CmsRfcCI> existingRFCs,
        Long releaseId) {
    long nsId = trUtil.verifyAndCreateNS(nsPath);
    // index bom nodes by node id for link resolution below
    Map<String, BomRfc> bomMap = new HashMap<String, BomRfc>();
    for (BomRfc bom : boms) {
        bomMap.put(bom.nodeId, bom);
    }
    // need to verify all the to links for the case when we have converge link
    verifyToLinks(bomMap);
    //lets find out the exec order and populate relations list
    Map<String, BomLink> links = new HashMap<String, BomLink>();
    for (BomRfc bom :boms) {
        if (bom.fromLinks.size()==0) {
            // root node (nothing depends on it): seed the exec-order walk
            processOrder(bom, bomMap, startExecOrder, 1);
        } else {
            for (BomLink link : bom.fromLinks) {
                links.put(link.fromNodeId + "@" + link.toNodeId, link);
                //logger.info(link.fromNodeId + "-" + link.toNodeId);
            }
        }
    }
    int maxExecOrder = getMaxExecOrder(boms);
    // bucket bom nodes by their computed exec order
    Map<Integer, List<BomRfc>> orderedMap = new HashMap<Integer, List<BomRfc>>();
    for (BomRfc bom : boms) {
        if (!orderedMap.containsKey(bom.execOrder)) {
            orderedMap.put(bom.execOrder, new ArrayList<BomRfc>());
        }
        orderedMap.get(bom.execOrder).add(bom);
    }
    Set<Long> propagations = new HashSet<Long>();
    Set<Long> bomCiIds = new HashSet<Long>();
    // manifest ci id -> neighbor names to propagate updates to (memoized)
    Map<Long, List<String>> manifestPropagations = new HashMap<Long, List<String>>();
    long timeTakenByPropagation = 0;
    //now lets create rfcs
    int realExecOrder = startExecOrder;
    int numberOfRFCs = 0;
    List<CmsRfcCI> replacedComputes = new ArrayList<CmsRfcCI>();
    for (int i=startExecOrder; i<=maxExecOrder; i++) {
        boolean incOrder = false;
        if (orderedMap.containsKey(i)) {
            for (BomRfc bom : orderedMap.get(i)) {
                String shortClazzName = trUtil.getShortClazzName(bom.mfstCi.getCiClassName());
                String bomId = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()) + ":" + bom.ciName;
                CmsCI existingCi = existingCIs.get(bomId);
                CmsRfcCI existingRfc = existingRFCs.get(bomId);
                boolean rfcCreated = false;
                if (priorityMap.containsKey(shortClazzName)) {
                    // prioritized classes get their fixed (early) exec order
                    bom.execOrder = priorityMap.get(shortClazzName);
                    rfcCreated = upsertRfcs(bom, existingCi, existingRfc, nsId, nsPath, bindingRel, releaseId, userId, existingRels);
                    if (rfcCreated && realExecOrder == 1) incOrder = true;
                } else {
                    //bom.execOrder = realExecOrder;
                    rfcCreated = upsertRfcs(bom, existingCi, existingRfc, nsId, nsPath, bindingRel, releaseId, userId, existingRels);
                    if (rfcCreated && bom.rfc != null) {
                        //if rfc was created, lets check if any propagation is required
                        if(bom.rfc.getCiClassName().equals("bom.Compute")
                                && bom.rfc.getRfcAction().equals("replace")) {
                            // remembered for the lb/fqdn touch-up after the loop
                            replacedComputes.add(bom.rfc);
                        }
                        long startTime = System.currentTimeMillis();
                        if (manifestPropagations.get(bom.manifestCiId) == null) {
                            mapPropagations(bom.manifestCiId, manifestPropagations);
                        }
                        if (manifestPropagations.get(bom.manifestCiId).size() != 0) {
                            propagateUpdate(bom.rfc.getCiId(), bom.manifestCiId, manifestPropagations, userId, propagations);
                        }
                        long endTime = System.currentTimeMillis();
                        timeTakenByPropagation = timeTakenByPropagation + (endTime - startTime);
                    }
                    incOrder = incOrder || rfcCreated;
                }
                if (bom.rfc != null) {
                    bomCiIds.add(bom.rfc.getCiId());
                }
                if (rfcCreated) {
                    numberOfRFCs++;
                    if (numberOfRFCs % 10 == 0) {
                        logger.info(">>> Inserted " + numberOfRFCs + " rfcs;");
                    }
                }
            }
        }
        if (incOrder) realExecOrder++;
    }
    logger.info(">>> Inserted " + numberOfRFCs + " rfcs;");
    logger.info(">>> Done with RFCs working on relations...");
    //lets create dependsOn Relations
    //TODO question should we propagate rel attrs
    int maxRfcExecOrder = getMaxRfcExecOrder(boms);
    maxExecOrder = (maxRfcExecOrder > 0) ? maxRfcExecOrder : maxExecOrder;
    //execute all dummmy updates in one last step
    //maxExecOrder++;
    //List<CmsRfcRelation> existingDependsOnRels = cmRfcMrgProcessor.getDfDjRelations("bom.DependsOn", null, nsPath, null, null, null);
    Set<String> djRelGoids = new HashSet<String>();
    boolean increaseMaxOrder = false;
    int numberOfRelRFCs = 0;
    for (BomLink link : links.values()) {
        // only wire relations between nodes that actually produced rfc objects
        if (bomMap.get(link.fromNodeId).rfc != null &&
                bomMap.get(link.toNodeId).rfc != null) {
            long fromCiId = bomMap.get(link.fromNodeId).rfc.getCiId();
            long toCiId = bomMap.get(link.toNodeId).rfc.getCiId();
            CmsRfcRelation dependsOn = bootstrapRelationRfc(fromCiId,toCiId,"bom.DependsOn", nsPath, existingRels);
            dependsOn.setComments(generateRelComments(bomMap.get(link.fromNodeId).rfc.getCiName(),
                    bomMap.get(link.fromNodeId).rfc.getCiClassName(),
                    bomMap.get(link.toNodeId).rfc.getCiName(),
                    bomMap.get(link.toNodeId).rfc.getCiClassName()));
            dependsOn.setCreatedBy(userId);
            dependsOn.setUpdatedBy(userId);
            dependsOn.setNsId(nsId);
            // attach pending ci rfc ids so the relation rides along with them
            if (bomMap.get(link.fromNodeId).rfc.getRfcId() > 0) {
                dependsOn.setFromRfcId(bomMap.get(link.fromNodeId).rfc.getRfcId());
            }
            if (bomMap.get(link.toNodeId).rfc.getRfcId() >0) {
                dependsOn.setToRfcId(bomMap.get(link.toNodeId).rfc.getRfcId());
            }
            //since the DependsOn validation happened on Manifest level already we will skip validation here for perf reasons
            //dependsOn.setValidated(true);
            //CmsRfcRelation newRel = cmRfcMrgProcessor.upsertRfcRelationNoCheck(dependsOn, userId, "dj");
            createBomRelationRfc(dependsOn, existingRels, releaseId);
            djRelGoids.add(dependsOn.getRelationGoid());
            //if we got new relation lets update create dummy update rfcs
            if (dependsOn.getRfcId()>0) {
                numberOfRelRFCs++;
                existingRels.addRelRfc(dependsOn);
                if (bomMap.get(link.fromNodeId).rfc.getRfcId()==0) {
                    // the from-ci itself had no rfc: touch it so the new
                    // relation gets deployed, then propagate the update
                    cmRfcMrgProcessor.createDummyUpdateRfc(fromCiId, null, bomMap.get(link.fromNodeId).execOrder, userId);
                    long startTime = System.currentTimeMillis();
                    if (manifestPropagations.get(bomMap.get(link.fromNodeId).manifestCiId) == null) {
                        mapPropagations(bomMap.get(link.fromNodeId).manifestCiId, manifestPropagations);
                    }
                    if (manifestPropagations.get(bomMap.get(link.fromNodeId).manifestCiId).size() != 0) {
                        propagateUpdate(fromCiId, bomMap.get(link.fromNodeId).manifestCiId, manifestPropagations, userId, propagations);
                    }
                    long endTime = System.currentTimeMillis();
                    timeTakenByPropagation = timeTakenByPropagation + (endTime - startTime);
                    increaseMaxOrder = true;
                }
                if (numberOfRelRFCs % 10 == 0) {
                    logger.info(">>> Inserted " + numberOfRelRFCs + " relation rfcs;");
                }
            }
        }
    }
    logger.info(">>> Inserted " + numberOfRelRFCs + " relation rfcs;");
    //Now create dummy updates for all the dependency-propagations needed
    if (propagations.size() > 0) {
        for (BomRfc bom : boms) {
            if (bom.rfc == null) {
                logger.info("rfc null for: " + bom.ciName);
                continue;
            }
            if (propagations.contains(bom.rfc.getCiId())) {
                String bomId = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()) + ":" + bom.ciName;
                CmsCI existingCi = existingCIs.get(bomId);
                CmsRfcCI existingRfc = existingRFCs.get(bomId);
                CmsRfcCI rfc = bootstrapRfc(bom,existingRfc, existingCi, nsPath);
                rfc.setCreatedBy(userId);
                rfc.setUpdatedBy(userId);
                rfc.setNsId(nsId);
                cmRfcMrgProcessor.createDummyUpdateRfc(rfc.getCiId(), null, bom.execOrder, userId);
            }
        }
    }
    //hack for lb/fqdn update on replaced computes
    propagate4ComputeReplace(replacedComputes);
    // on a full (non-partial) deployment, drop DependsOn relations between bom
    // cis that are no longer part of the desired state
    if (!isPartial) {
        for (CmsCIRelation existingRel : existingRels.getExistingRel(BOM_DEPENDS_ON_RELATION_NAME)) {
            if (!djRelGoids.contains(existingRel.getRelationGoid())
                    && bomCiIds.contains(existingRel.getFromCiId())
                    && bomCiIds.contains(existingRel.getToCiId())) {
                cmRfcMrgProcessor.requestRelationDelete(existingRel.getCiRelationId(), userId);
            }
        }
    }
    if (increaseMaxOrder) maxExecOrder++;
    logger.info(nsPath + " >>> Total time taken by propagation in seconds: " + timeTakenByPropagation/1000);
    return maxExecOrder;
}
/**
 * Workaround: when a compute is replaced, touch every bom.Lb and bom.Fqdn ci
 * that depends on it with a dummy update scheduled right after the compute.
 */
private void propagate4ComputeReplace(List<CmsRfcCI> bomCompRfcs) {
    String[] dependentClasses = {"bom.Lb", "bom.Fqdn"};
    for (CmsRfcCI rfc : bomCompRfcs) {
        for (String dependentClass : dependentClasses) {
            for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(rfc.getCiId(), "bom.DependsOn", null, dependentClass)) {
                cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, rfc.getExecOrder() + 1, rfc.getCreatedBy());
            }
        }
    }
}
/**
 * Loads the bom CIs currently deployed to the given cloud in this namespace,
 * keyed by "className:ciName".
 */
private Map<String, CmsCI> getExistingCis(long cloudId, String nsPath) {
    Map<String, CmsCI> bomCIs = new HashMap<String, CmsCI>();
    for (CmsCIRelation rel : cmProcessor.getToCIRelationsByNs(cloudId, BOM_CLOUD_RELATION_NAME, null, null, nsPath)) {
        CmsCI bomCi = rel.getFromCi();
        bomCIs.put(bomCi.getCiClassName() + ":" + bomCi.getCiName(), bomCi);
    }
    return bomCIs;
}
/**
 * Loads all existing relations in the namespace, grouped by relation name
 * and keyed by "fromCiId:toCiId" within each group.
 */
private Map<String, Map<String,CmsCIRelation>> getExistingRelations(String nsPath) {
    Map<String, Map<String,CmsCIRelation>> bomRelsMap = new HashMap<String, Map<String,CmsCIRelation>>();
    for (CmsCIRelation rel : cmProcessor.getCIRelationsNaked(nsPath, null, null, null, null)) {
        Map<String,CmsCIRelation> byEndpoints = bomRelsMap.get(rel.getRelationName());
        if (byEndpoints == null) {
            byEndpoints = new HashMap<String,CmsCIRelation>();
            bomRelsMap.put(rel.getRelationName(), byEndpoints);
        }
        byEndpoints.put(rel.getFromCiId() + ":" + rel.getToCiId(), rel);
    }
    return bomRelsMap;
}
/** Loads all open rfc CIs in the namespace, keyed by "className:ciName". */
private Map<String, CmsRfcCI> getOpenRFCs(String nsPath) {
    Map<String, CmsRfcCI> rfcs = new HashMap<String, CmsRfcCI>();
    for (CmsRfcCI rfc : rfcProcessor.getOpenRfcCIByClazzAndName(nsPath, null, null)) {
        rfcs.put(rfc.getCiClassName() + ":" + rfc.getCiName(), rfc);
    }
    return rfcs;
}
/**
 * Loads all open relation rfcs in the namespace, grouped by relation name
 * and keyed by "fromCiId:toCiId" within each group.
 */
private Map<String, Map<String,CmsRfcRelation>> getOpenRelationsRfcs(String nsPath) {
    Map<String, Map<String,CmsRfcRelation>> bomRelsMap = new HashMap<String, Map<String,CmsRfcRelation>>();
    for (CmsRfcRelation rel : rfcProcessor.getOpenRfcRelationsByNs(nsPath)) {
        Map<String,CmsRfcRelation> byEndpoints = bomRelsMap.get(rel.getRelationName());
        if (byEndpoints == null) {
            byEndpoints = new HashMap<String,CmsRfcRelation>();
            bomRelsMap.put(rel.getRelationName(), byEndpoints);
        }
        byEndpoints.put(rel.getFromCiId() + ":" + rel.getToCiId(), rel);
    }
    return bomRelsMap;
}
/**
 * Recursively propagates a dummy-update requirement from one bom ci to its
 * bom.DependsOn neighbors whose names match the manifest propagate_to
 * targets, collecting the affected bom ci ids in {@code propagations}.
 *
 * Fix: the recursion previously called {@code realizedAs.get(0)} guarded
 * only by a null check — an empty result list threw
 * IndexOutOfBoundsException; it is now also guarded by {@code isEmpty()}.
 * The two mirrored from/to branches were deduplicated into a helper.
 */
private void propagateUpdate(long bomCiId, long manifestId,
        Map<Long, List<String>> manifestPropagations, String userId, Set<Long> propagations) {
    List<String> targetManifestCiNames = manifestPropagations.get(manifestId);
    List<CmsCIRelation> rels = cmProcessor.getAllCIRelations(bomCiId); // all bom relations for this bom ci
    if (targetManifestCiNames == null) {
        logger.info("nothing to propagate for bomCiId: " + bomCiId + " and manifestCiId: " + manifestId);
        return;
    }
    for (String targetCiName : targetManifestCiNames) {
        for (CmsCIRelation rel : rels) {
            if (! rel.getRelationName().equals("bom.DependsOn")) {
                continue;
            }
            // the neighbor is whichever end of the relation is populated
            if (rel.getFromCi() != null) {
                propagateToNeighbor(bomCiId, rel.getFromCi(), rel.getFromCiId(), targetCiName, manifestPropagations, userId, propagations);
            } else if (rel.getToCi() != null) {
                propagateToNeighbor(bomCiId, rel.getToCi(), rel.getToCiId(), targetCiName, manifestPropagations, userId, propagations);
            }
        }
    }
}

/**
 * Marks a single neighbor bom ci for propagation when its name matches
 * "<targetCiName>-" and it was not visited yet, then recurses through its
 * manifest ci (resolved via the base.RealizedAs relation).
 */
private void propagateToNeighbor(long bomCiId, CmsCI neighborCi, long neighborCiId, String targetCiName,
        Map<Long, List<String>> manifestPropagations, String userId, Set<Long> propagations) {
    String ciName = neighborCi.getCiName();
    if (ciName == null || !ciName.startsWith(targetCiName + "-")) {
        return;
    }
    if (propagations.contains(neighborCiId)) {
        return; // already visited — prevents infinite recursion
    }
    logger.info("propagating update from bom cid : " + bomCiId + " to " + neighborCiId);
    propagations.add(neighborCiId);
    List<CmsCIRelation> realizedAs = cmProcessor.getToCIRelations(neighborCiId,
            "base.RealizedAs", neighborCi.getCiClassName().replaceFirst("bom", "manifest"));
    // guard against an empty list as well as null — get(0) on an empty
    // result previously threw IndexOutOfBoundsException
    if (realizedAs != null && !realizedAs.isEmpty()) {
        propagateUpdate(neighborCiId, realizedAs.get(0).getFromCiId(), manifestPropagations, userId, propagations);
    }
}
/**
 * Lazily computes, for a manifest ci, the names of neighbor manifest cis an
 * update must propagate to, based on the propagate_to attribute of its
 * manifest.DependsOn relations; results are memoized into the supplied map
 * and the computation recurses through every discovered target.
 */
private void mapPropagations(long manifestCiId, Map<Long, List<String>> manifestPropagations) {
    if (manifestPropagations.get(manifestCiId) != null) {
        return; // already computed for this manifest ci
    }
    List<String> targetManifests = new ArrayList<String>();
    manifestPropagations.put(manifestCiId, targetManifests);
    for (CmsCIRelation rel : cmProcessor.getAllCIRelations(manifestCiId)) {
        if (!"manifest.DependsOn".equals(rel.getRelationName())) {
            continue;
        }
        CmsCIRelationAttribute attrib = rel.getAttribute("propagate_to");
        if (attrib == null || attrib.getDfValue() == null) {
            continue;
        }
        String direction = attrib.getDfValue();
        if (rel.getFromCiId() > 0 && rel.getFromCiId() == manifestCiId
                && (direction.equalsIgnoreCase("to") || direction.equalsIgnoreCase("both"))) {
            // this ci is the "from" end and propagation flows towards "to"
            targetManifests.add(rel.getToCi().getCiName());
            mapPropagations(rel.getToCiId(), manifestPropagations);
        } else if (rel.getToCiId() > 0 && rel.getToCiId() == manifestCiId
                && (direction.equalsIgnoreCase("from") || direction.equalsIgnoreCase("both"))) {
            // this ci is the "to" end and propagation flows towards "from"
            targetManifests.add(rel.getFromCi().getCiName());
            mapPropagations(rel.getFromCiId(), manifestPropagations);
        }
    }
}
/**
 * Ensures link symmetry across the bom node graph: for every fromLink
 * recorded on a node, the target node receives a matching toLink if it does
 * not already have one (needed when converge links are present).
 */
private void verifyToLinks(Map<String, BomRfc> bomMap) {
    for (BomRfc fromBom : bomMap.values()) {
        for (BomLink link : fromBom.fromLinks) {
            BomRfc toBom = bomMap.get(link.toNodeId);
            if (toBom.getExisitngToLinks(fromBom.nodeId) != null) {
                continue; // back-link already present
            }
            BomLink backLink = new BomLink();
            backLink.fromNodeId = fromBom.nodeId;
            backLink.fromMfstCiId = fromBom.manifestCiId;
            backLink.toNodeId = toBom.nodeId;
            backLink.toMfstCiId = toBom.manifestCiId;
            toBom.toLinks.add(backLink);
        }
    }
}
/**
 * Creates (or refreshes) the bom ci rfc for a single bom node plus its
 * RealizedAs (manifest -> bom) and DeployedTo (bom -> cloud) relation rfcs.
 * Manifest CIs in state "pending_deletion" produce delete rfcs instead.
 *
 * NOTE(review): relies on rfc ids being set as a side effect of
 * createBomRfc/createBomRelationRfc; only comments added here.
 *
 * @return true when a ci rfc (create/update/delete/dummy-update) was created
 */
private boolean upsertRfcs(BomRfc bom, CmsCI existingCi, CmsRfcCI existingRfc, long nsId, String nsPath, CmsCIRelation bindingRel, Long releaseId, String userId, ExistingRels existingRels) {
    boolean rfcCreated = false;
    if (bom.mfstCi.getCiState().equalsIgnoreCase("pending_deletion")) {
        // manifest ci marked for deletion: delete the corresponding bom cis
        List<CmsRfcCI> cis2delete = cmRfcMrgProcessor.getDfDjCi(nsPath, "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()), bom.ciName, "dj");
        if (cis2delete.size() > 0) {
            for (CmsRfcCI ci2delete : cis2delete) {
                //bom.rfc = cmRfcMrgProcessor.requestCiDelete(ci2delete.getCiId(), userId, bom.execOrder);
                bom.rfc = cmRfcMrgProcessor.requestCiDeleteCascadeNoRelsRfcs(ci2delete.getCiId(), userId, bom.execOrder);
                rfcCreated = bom.rfc.getRfcId() > 0;
            }
        } else {
            //if no boms lets see if we have some in other cloud
            if (cmProcessor.getCountFromCIRelationsByNS(bom.mfstCi.getCiId(), "base.RealizedAs", null, null, nsPath, false) == 0) {
                // no bom cis anywhere realize this manifest ci: remove it
                cmProcessor.deleteCI(bom.mfstCi.getCiId(), true, userId);
            }
        }
    } else {
        CmsRfcCI rfc = bootstrapRfc(bom, existingRfc, existingCi, nsPath);
        rfc.setCreatedBy(userId);
        rfc.setUpdatedBy(userId);
        rfc.setNsId(nsId);
        //bom.rfc = cmRfcMrgProcessor.upsertRfcCINoChecks(rfc, userId, "dj");
        createBomRfc(rfc,existingCi, existingRfc, releaseId);
        bom.rfc = rfc;
        rfcCreated = bom.rfc.getRfcId() > 0;
        if (bom.rfc.getRfcId() == 0) {
            //lets make sure the manifest object has not changed or we will create dummy update
            CmsCIRelation realizedAsRel = existingRels.getExistingRel(BOM_REALIZED_RELATION_NAME, bom.mfstCi.getCiId(), bom.rfc.getCiId());
            //cmProcessor.getFromToCIRelations(bom.mfstCi.getCiId(), "base.RealizedAs", bom.rfc.getCiId());
            if (realizedAsRel != null && realizedAsRel.getAttribute("last_manifest_rfc") != null) {
                long deployedManifestRfc = Long.valueOf(realizedAsRel.getAttribute("last_manifest_rfc").getDjValue());
                if (bom.mfstCi.getLastAppliedRfcId() > deployedManifestRfc) {
                    // manifest changed since last deployment: force a dummy update
                    //TODO convert to direct insert
                    bom.rfc = cmRfcMrgProcessor.createDummyUpdateRfc(bom.rfc.getCiId(), null, bom.execOrder, userId);
                    rfcCreated = true;
                }
            }
        }
        //lets create RealizedAs relation, stamping the manifest rfc we deploy
        Map<String,String> attrs = new HashMap<String,String>();
        attrs.put("last_manifest_rfc", String.valueOf(bom.mfstCi.getLastAppliedRfcId()));
        CmsRfcRelation realizedAs = bootstrapRelationRfcWithAttributes(bom.mfstCi.getCiId(), bom.rfc.getCiId(), "base.RealizedAs", nsPath, attrs, existingRels);
        if (rfcCreated) {
            realizedAs.setToRfcId(bom.rfc.getRfcId());
        }
        realizedAs.setComments(generateRelComments(bom.mfstCi.getCiName(), bom.mfstCi.getCiClassName(), bom.rfc.getCiName(), bom.rfc.getCiClassName()));
        realizedAs.getAttribute("priority").setNewValue(bindingRel.getAttribute("priority").getDjValue());
        realizedAs.setCreatedBy(userId);
        realizedAs.setUpdatedBy(userId);
        realizedAs.setNsId(nsId);
        //validateRelRfc(realizedAs, bom.mfstCi.getCiClassId(), bom.rfc.getCiClassId());
        //realizedAs.setValidated(true);
        createBomRelationRfc(realizedAs, existingRels, releaseId);
        //cmRfcMrgProcessor.upsertRfcRelationNoCheck(realizedAs, userId, "dj");
        //lest create relation to the binding
        CmsRfcRelation deployedTo = bootstrapRelationRfc(bom.rfc.getCiId(), bindingRel.getToCiId(), "base.DeployedTo", nsPath, existingRels);
        deployedTo.setComments(generateRelComments(bom.rfc.getCiName(), bom.rfc.getCiClassName(), bindingRel.getToCi().getCiName(), bindingRel.getToCi().getCiClassName()));
        deployedTo.getAttribute("priority").setNewValue(bindingRel.getAttribute("priority").getDjValue());
        deployedTo.setCreatedBy(userId);
        deployedTo.setUpdatedBy(userId);
        deployedTo.setNsId(nsId);
        //validateRelRfc(deployedTo, bom.rfc.getCiClassId(), bindingRel.getToCi().getCiClassId());
        //deployedTo.setValidated(true);
        if (rfcCreated) {
            deployedTo.setFromRfcId(bom.rfc.getRfcId());
        }
        createBomRelationRfc(deployedTo, existingRels, releaseId);
        //cmRfcMrgProcessor.upsertRfcRelationNoCheck(deployedTo, userId, "dj");
    }
    return rfcCreated;
}
/**
 * Persists a bom ci rfc. For a brand-new ci (ciId == 0) an "add" rfc is inserted (or an
 * already-open rfc updated). For an existing ci the proposed attributes are diffed via
 * {@link #needUpdateRfc} and an "update"/"replace" rfc carrying only the delta is
 * written; if nothing changed, no rfc is created.
 *
 * @param rfc         candidate rfc, already bootstrapped with desired attribute values
 * @param existingCi  existing bom ci to diff against (may be null for adds)
 * @param existingRfc open rfc for this ci, or null
 * @param releaseId   open release id, or null to resolve it from the rfc/namespace
 */
private void createBomRfc(CmsRfcCI rfc, CmsCI existingCi, CmsRfcCI existingRfc, Long releaseId) {
    if (rfc.getCiId() == 0) {
        //this is add rfc
        if (releaseId == null) {
            if (rfc.getReleaseId() > 0) {
                // NOTE(review): this assignment to the local releaseId is never read
                // afterwards in this branch - verify whether it was meant to be used
                releaseId = rfc.getReleaseId();
            } else {
                rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
            }
        }
        rfc.setIsActiveInRelease(true);
        rfc.setRfcAction("add");
        // rfcId == 0 means no open rfc exists yet; otherwise update the open one in place
        if (rfc.getRfcId() == 0) {
            rfcProcessor.createBomRfc(rfc);
        } else {
            rfcProcessor.updateBomRfc(rfc, existingRfc);
        }
    } else {
        //need to figure out delta and create update rfc
        if (needUpdateRfc(rfc, existingCi)) {
            if (releaseId == null) {
                if (rfc.getReleaseId() > 0) {
                    // NOTE(review): same unused local assignment as in the add branch
                    releaseId = rfc.getReleaseId();
                } else {
                    rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                }
            }
            rfc.setIsActiveInRelease(true);
            if (rfc.getRfcId() == 0) {
                rfcProcessor.createBomRfc(rfc);
            } else {
                rfcProcessor.updateBomRfc(rfc, existingRfc);
            }
            /*
            if(rfc.getCiClassName().equals("bom.Compute")
                    && rfc.getRfcAction().equals("replace")) {
                for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(rfc.getCiId(), "bom.DependsOn", null, "bom.Lb")) {
                    cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, rfc.getExecOrder() + 1, rfc.getCreatedBy());
                }
                for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(rfc.getCiId(), "bom.DependsOn", null, "bom.Fqdn")) {
                    cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, rfc.getExecOrder() + 1, rfc.getCreatedBy());
                }
            }
            */
        }
    }
}
/**
 * Decides whether an update/replace rfc is needed for a bom ci by diffing the proposed
 * rfc attributes against the existing ci. Unchanged attributes are stripped from the
 * rfc so only the delta is persisted. A ci in "replace" state always needs an rfc.
 *
 * @param rfcCi  candidate rfc carrying the desired attribute values; mutated in place
 *               (rfcAction is set; equal attributes may be removed)
 * @param baseCi existing bom ci to diff against
 * @return true if at least one attribute differs or the ci is marked for replacement
 */
private boolean needUpdateRfc(CmsRfcCI rfcCi, CmsCI baseCi) {
    boolean needUpdate = false;
    if ("replace".equals(baseCi.getCiState())) {
        // a ci flagged for replacement is redeployed regardless of attribute deltas
        rfcCi.setRfcAction("replace");
        needUpdate = true;
    } else {
        rfcCi.setRfcAction("update");
    }
    Set<String> equalAttrs = new HashSet<String>(rfcCi.getAttributes().size());
    for (CmsRfcAttribute attr : rfcCi.getAttributes().values()) {
        CmsCIAttribute existingAttr = baseCi.getAttribute(attr.getAttributeName());
        if (existingAttr == null) {
            // attribute not present on the base ci yet - treat as a change
            // (previously this dereference would have thrown an NPE)
            needUpdate = true;
        } else if (djValidator.equalStrs(attr.getNewValue(), existingAttr.getDjValue())) {
            equalAttrs.add(attr.getAttributeName());
        } else {
            needUpdate = true;
        }
    }
    if (needUpdate) {
        // strip unchanged attributes so the rfc carries only the delta
        for (String equalAttrName : equalAttrs) {
            rfcCi.getAttributes().remove(equalAttrName);
        }
    }
    return needUpdate;
}
/**
 * Persists a bom relation rfc, mirroring {@link #createBomRfc} for relations. A new
 * relation (ciRelationId == 0) produces an "add" rfc; an existing one is diffed via
 * {@link #needUpdateRfcRelation} and an "update" rfc with only the changed attributes
 * is written, or nothing at all when the relation is unchanged.
 *
 * @param rfc          candidate relation rfc
 * @param existingRels cache used to look up the existing relation and any open rfc
 * @param releaseId    open release id, or null to resolve it from the rfc/namespace
 */
private void createBomRelationRfc(CmsRfcRelation rfc, ExistingRels existingRels, Long releaseId) {
    if (rfc.getCiRelationId() == 0) {
        //this is add rfc
        if (releaseId == null) {
            if (rfc.getReleaseId() > 0) {
                // NOTE(review): assignment to the local releaseId is never read afterwards
                releaseId = rfc.getReleaseId();
            } else {
                rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
            }
        }
        rfc.setIsActiveInRelease(true);
        rfc.setRfcAction("add");
        // rfcId == 0 means no open rfc exists yet; otherwise update the open one in place
        if (rfc.getRfcId() == 0) {
            rfcProcessor.createBomRfcRelation(rfc);
        } else {
            rfcProcessor.updateBomRfcRelation(rfc, existingRels.getOpenRelRfc(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId()));
        }
    } else {
        //need to figure out delta and create update rfc
        CmsCIRelation existingRel = existingRels.getExistingRel(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId());
        if (needUpdateRfcRelation(rfc, existingRel)) {
            if (releaseId == null) {
                if (rfc.getReleaseId() > 0) {
                    // NOTE(review): same unused local assignment as in the add branch
                    releaseId = rfc.getReleaseId();
                } else {
                    rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                }
            }
            rfc.setIsActiveInRelease(true);
            rfc.setRfcAction("update");
            if (rfc.getRfcId() == 0) {
                rfcProcessor.createBomRfcRelation(rfc);
            } else {
                rfcProcessor.updateBomRfcRelation(rfc, existingRels.getOpenRelRfc(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId()));
            }
        }
    }
}
/**
 * Decides whether an update rfc is needed for a bom relation by diffing the proposed
 * rfc attributes against the existing relation. Unchanged attributes are stripped from
 * the rfc so only the delta is persisted.
 *
 * @param rfcRel  candidate relation rfc; mutated in place (equal attributes removed)
 * @param baseRel existing relation to diff against
 * @return true if at least one attribute differs
 */
private boolean needUpdateRfcRelation(CmsRfcRelation rfcRel, CmsCIRelation baseRel) {
    boolean needUpdate = false;
    Set<String> equalAttrs = new HashSet<String>(rfcRel.getAttributes().size());
    for (CmsRfcAttribute attr : rfcRel.getAttributes().values()) {
        CmsCIRelationAttribute existingAttr = baseRel.getAttribute(attr.getAttributeName());
        if (existingAttr == null) {
            // attribute not present on the base relation yet - treat as a change
            // (previously this dereference would have thrown an NPE)
            needUpdate = true;
        } else if (djValidator.equalStrs(attr.getNewValue(), existingAttr.getDjValue())) {
            equalAttrs.add(attr.getAttributeName());
        } else {
            needUpdate = true;
        }
    }
    if (needUpdate) {
        // strip unchanged attributes so the rfc carries only the delta
        for (String attrName : equalAttrs) {
            rfcRel.getAttributes().remove(attrName);
        }
    }
    return needUpdate;
}
/**
 * Validates a relation rfc against the metadata definitions for the given from/to
 * class ids and marks it validated on success.
 *
 * @throws DJException with DJ_VALIDATION_ERROR when validation fails
 */
private void validateRelRfc(CmsRfcRelation rfcRelation, int fromClassId, int toClassId) {
    CIValidationResult result = djValidator.validateRfcRelation(rfcRelation, fromClassId, toClassId);
    if (!result.isValidated()) {
        String msg = result.getErrorMsg();
        logger.error(msg);
        throw new DJException(CmsError.DJ_VALIDATION_ERROR, msg);
    }
    rfcRelation.setValidated(true);
}
/**
 * Serializes the from/to ci name and class pairs of a relation into a JSON comment
 * string used on relation rfcs.
 */
private String generateRelComments(String fromCiName, String fromCiClass, String toCiName, String toCiClass) {
    Map<String, String> comment = new HashMap<String, String>();
    comment.put("fromCiName", fromCiName);
    comment.put("fromCiClass", fromCiClass);
    comment.put("toCiName", toCiName);
    comment.put("toCiClass", toCiClass);
    return gson.toJson(comment);
}
/**
 * Drives deletion of a manifest platform. If bom cis still exist under the platform's
 * bom namespace, delete rfcs for the obsolete boms are generated; otherwise any
 * lingering open rfcs are cleaned up. When the platform is in "pending_deletion"
 * state and no boms remain, the manifest platform, its components and its namespace
 * are removed for real.
 *
 * @param platformCi     manifest platform ci to delete
 * @param bindingRel     platform->cloud binding relation
 * @param nsPath         environment bom namespace path (platform path is derived from it)
 * @param startExecOrder first exec order to assign to generated delete rfcs
 * @param userId         user recorded on deletes/rfcs
 * @return max exec order used by the generated delete rfcs, or 0 if none were created
 */
public int deleteManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, String userId){
    int maxExecOrder = 0;
    List<CmsCIRelation> mfstPlatComponents = cmProcessor.getFromCIRelations(platformCi.getCiId(), null, "Requires", null);
    if (mfstPlatComponents.size() > 0) {
        //List<BomRfc> boms = new ArrayList<BomRfc>();
        String platNsPath = null;
        // manifest.Iaas platforms have no version segment in their bom namespace
        if (platformCi.getCiClassName().equals("manifest.Iaas")) {
            platNsPath = nsPath + "/" + platformCi.getCiName();
        } else {
            platNsPath = nsPath + "/" + platformCi.getCiName() + "/" + platformCi.getAttribute("major_version").getDjValue();
        }
        long numOfBoms = cmProcessor.getCountBy3(platNsPath, null, null, false);
        if (numOfBoms >0) {
            // bom instances still exist - generate delete rfcs for them
            logger.info(nsPath + ">>>" + platformCi.getCiName() + ", finding obsolete boms");
            Map<String, CmsCI> existingCIs = getExistingCis(bindingRel.getToCiId(), platNsPath);
            maxExecOrder = findObsolete(new ArrayList<BomRfc>(), bindingRel, platNsPath, startExecOrder, existingCIs, userId, true);
        } else {
            // there is no boms lets cleanup any open rfcs if any
            List<CmsRfcRelation> deployedTorfcRels = rfcProcessor.getOpenToRfcRelationByTargetClazzNoAttrs(bindingRel.getToCiId(), "base.DeployedTo", null, null);
            for (CmsRfcRelation deployedToRel : deployedTorfcRels) {
                // remove every open relation rfc attached to the orphaned bom rfc ci
                List<CmsRfcRelation> rfcRels = rfcProcessor.getOpenRfcRelationBy2(deployedToRel.getFromCiId(), null, null, null);
                rfcRels.addAll(rfcProcessor.getOpenRfcRelationBy2(null, deployedToRel.getFromCiId(), null, null));
                for (CmsRfcRelation rfcRel : rfcRels) {
                    rfcProcessor.rmRfcRelationFromRelease(rfcRel.getRfcId());
                }
                rfcProcessor.rmRfcCiFromRelease(deployedToRel.getFromRfcId());
            }
        }
        if (platformCi.getCiState().equalsIgnoreCase("pending_deletion") && numOfBoms==0) {
            //if no bom exists - delete the manifest platform for real
            for (CmsCIRelation mfstPlatComponentRel : mfstPlatComponents) {
                cmProcessor.deleteCI(mfstPlatComponentRel.getToCiId(), true, userId);
            }
            cmProcessor.deleteCI(platformCi.getCiId(), true, userId);
            trUtil.deleteNs(platNsPath);
        }
    }
    return maxExecOrder;
}
/**
 * Builds a new bom ci rfc for the given BomRfc node: resolves the target bom class
 * from the manifest class, seeds attribute defaults from the class metadata, copies
 * the manifest attribute values on top, and wires up ciId/rfcId from any existing
 * ci or open rfc.
 */
private CmsRfcCI bootstrapRfc(BomRfc bom, CmsRfcCI existingRfc, CmsCI existingBomCi, String nsPath) {
    String bomClazzName = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName());
    CmsClazz bomClazz = mdProcessor.getClazz(bomClazzName);

    CmsRfcCI rfc = new CmsRfcCI();
    rfc.setNsPath(nsPath);
    rfc.setCiClassId(bomClazz.getClassId());
    rfc.setCiClassName(bomClazz.getClassName());

    // seed defaults from the class definition and index all md attributes by name
    Map<String, CmsClazzAttribute> attrsByName = new HashMap<String, CmsClazzAttribute>();
    for (CmsClazzAttribute mdAttr : bomClazz.getMdAttributes()) {
        if (mdAttr.getDefaultValue() != null) {
            CmsRfcAttribute seeded = new CmsRfcAttribute();
            seeded.setAttributeId(mdAttr.getAttributeId());
            seeded.setAttributeName(mdAttr.getAttributeName());
            seeded.setNewValue(mdAttr.getDefaultValue());
            rfc.addAttribute(seeded);
        }
        attrsByName.put(mdAttr.getAttributeName(), mdAttr);
    }

    // overlay values from the manifest ci and link to existing ci/rfc identifiers
    applyCiToRfc(rfc, bom, attrsByName, true);
    rfc.setExecOrder(bom.execOrder);
    setCiId(rfc, existingRfc, existingBomCi);
    return rfc;
}
/*
private void reverseExecOrder(List<BomRfc> boms, int startOrder) {
int maxOrder = getMaxExecOrder(boms);
for (BomRfc bom:boms) {
bom.execOrder = maxOrder-bom.execOrder+startOrder;
}
}
*/
/**
 * Recursively pushes execution order up the dependency graph: this node's execOrder is
 * raised to at least {@code order}, then every parent (via toLinks) gets order + 1.
 *
 * @throws TransistorException when recursion exceeds MAX_RECUSION_DEPTH (cycle guard)
 */
private void processOrder(BomRfc bom, Map<String, BomRfc> bomMap, int order, int recursionDepth) {
    if (recursionDepth >= MAX_RECUSION_DEPTH) {
        String err = "Circular dependency detected, (level - " + recursionDepth + "),\n please check the platform diagram for " + extractPlatformNameFromNsPath(bom.mfstCi.getNsPath());
        logger.error(err);
        throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
    }
    bom.execOrder = Math.max(bom.execOrder, order);
    int parentOrder = order + 1;
    for (BomLink link : bom.toLinks) {
        processOrder(bomMap.get(link.fromNodeId), bomMap, parentOrder, recursionDepth + 1);
    }
}
/** Returns the largest execOrder among the given bom nodes, or 0 for an empty list. */
private int getMaxExecOrder(List<BomRfc> boms) {
    int max = 0;
    for (BomRfc bom : boms) {
        if (bom.execOrder > max) {
            max = bom.execOrder;
        }
    }
    return max;
}
/**
 * Returns the largest execOrder among bom nodes that actually produced an rfc
 * (rfc present with rfcId &gt; 0), or 0 when none did.
 */
private int getMaxRfcExecOrder(List<BomRfc> boms) {
    int max = 0;
    for (BomRfc bom : boms) {
        if (bom.rfc == null || bom.rfc.getRfcId() <= 0) {
            continue; // node generated no rfc - does not contribute
        }
        max = Math.max(max, bom.execOrder);
    }
    return max;
}
/**
 * Creates "base.Entrypoint" relation rfcs from the platform ci to every bom ci that
 * realizes a manifest ci marked as an entry point.
 */
private void processEntryPointRel(long platformCiId, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels) {
    for (CmsCIRelation epRel : cmProcessor.getFromCIRelationsNaked(platformCiId, null, "Entrypoint", null)) {
        List<BomRfc> entryBoms = bomsMap.get(epRel.getToCiId());
        if (entryBoms == null) {
            continue; // manifest ci produced no bom nodes
        }
        for (BomRfc bom : entryBoms) {
            if (bom.rfc == null) {
                continue;
            }
            CmsRfcRelation entryPoint = bootstrapRelationRfc(platformCiId, bom.rfc.getCiId(), "base.Entrypoint", nsPath, existingRels);
            cmRfcMrgProcessor.upsertRelationRfc(entryPoint, user, "dj");
        }
    }
}
/**
 * Creates "bom.ManagedVia" relation rfcs. For every manifest "ManagedVia" relation the
 * corresponding bom-level targets are found by walking the bom "DependsOn" graph along
 * the class path derived from the manifest graph, then a ManagedVia rfc is created for
 * each (bom ci, target bom ci) pair that does not already have one.
 *
 * @param mfstCiRels   platform "Requires" relations whose to-cis are the manifest components
 * @param bomsMap      manifest ciId -> bom nodes produced for it
 * @param nsPath       bom namespace path
 * @param user         user recorded on created rfcs
 * @param existingRels cache of existing/open relations
 * @param releaseId    open release id, or null
 * @throws TransistorException when no DependsOn path exists between the ManagedVia endpoints
 */
private void processManagedViaRels(List<CmsCIRelation> mfstCiRels, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels, Long releaseId) {
    long nsId = trUtil.verifyAndCreateNS(nsPath);
    List<CmsLink> dependsOnlinks = cmRfcMrgProcessor.getLinks(nsPath, "bom.DependsOn");
    //convert to map for traversing the path
    // fromCiId -> (toClassName -> toCiIds): adjacency index of the bom DependsOn graph
    Map<Long, Map<String,List<Long>>> dependsOnMap = new HashMap<Long, Map<String,List<Long>>>();
    for (CmsLink link : dependsOnlinks) {
        if (!dependsOnMap.containsKey(link.getFromCiId())) {
            dependsOnMap.put(link.getFromCiId(), new HashMap<String,List<Long>>());
        }
        if (!dependsOnMap.get(link.getFromCiId()).containsKey(link.getToClazzName())) {
            dependsOnMap.get(link.getFromCiId()).put(link.getToClazzName(), new ArrayList<Long>());
        }
        dependsOnMap.get(link.getFromCiId()).get(link.getToClazzName()).add(link.getToCiId());
    }
    // guards against creating the same relation rfc twice within this pass
    Set<String> relRfcGoids = new HashSet<String>();
    for (CmsCIRelation mfstCiRel : mfstCiRels) {
        CmsCI mfstCi = mfstCiRel.getToCi();
        //first lets check if we even have an add rfc for this Ci
        //if (newRfcExists(mfstCi.getCiId(), bomsMap)) {
        List<CmsCIRelation> mfstMngViaRels = cmProcessor.getFromCIRelationsNaked(mfstCi.getCiId(), null, "ManagedVia", null);
        for (CmsCIRelation mfstMngViaRel : mfstMngViaRels) {
            // lets find the path
            //List<String> pathClasses = getTraversalPath(mfstMngViaRel);
            List<String> pathClasses = getDpOnPath(mfstMngViaRel.getFromCiId(), mfstMngViaRel.getToCiId());
            if (pathClasses.size()==0) {
                String err = "Can not traverse ManagedVia relation using DependsOn path from ci " + mfstMngViaRel.getFromCiId() + ", to ci " + mfstMngViaRel.getToCiId() + "\n";
                err += mfstMngViaRel.getComments();
                logger.error(err);
                throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
            }
            for (BomRfc bomRfc : bomsMap.get(mfstCi.getCiId())) {
                //for this rfc we need to traverse by the DependsOn path down to ManagedVia Ci and create the relation\
                //Now this is tricky since it could get resolved as a tree so we need to use recursion
                LinkedList<String> path = new LinkedList<String>();
                path.addAll(pathClasses);
                if (bomRfc.rfc != null) {
                    List<Long> targets = getLeafsByPath(bomRfc.rfc.getCiId(), path,mfstMngViaRel.getToCiId(), dependsOnMap);
                    // index the candidate target bom nodes by their bom ciId
                    Map<Long, BomRfc> targetMap = new HashMap<Long, BomRfc>();
                    for (BomRfc targetBom : bomsMap.get(mfstMngViaRel.getToCiId())) {
                        targetMap.put(targetBom.rfc.getCiId(), targetBom);
                    }
                    for (long managedViaCiId : targets) {
                        CmsCIRelation existingRel = existingRels.getExistingRel(BOM_MANAGED_VIA_RELATION_NAME, bomRfc.rfc.getCiId(), managedViaCiId);
                        //cmProcessor.getFromToCIRelationsNaked(bomRfc.rfc.getCiId(), "bom.ManagedVia", managedViaCiId);
                        if (existingRel == null) {
                            CmsRfcRelation managedVia = bootstrapRelationRfc(bomRfc.rfc.getCiId(), managedViaCiId, "bom.ManagedVia", nsPath, existingRels);
                            managedVia.setNsId(nsId);
                            managedVia.setReleaseId(bomRfc.rfc.getReleaseId());
                            if (!relRfcGoids.contains(managedVia.getRelationGoid())) {
                                if (targetMap.containsKey(managedViaCiId)) {
                                    CmsRfcCI toCiRfc = targetMap.get(managedViaCiId).rfc;
                                    managedVia.setComments(generateRelComments(bomRfc.rfc.getCiName(), bomRfc.rfc.getCiClassName(), toCiRfc.getCiName(), toCiRfc.getCiClassName()));
                                    // wire open rfc ids so the relation rfc references them
                                    if (bomRfc.rfc != null && bomRfc.rfc.getRfcId() > 0) {
                                        managedVia.setFromRfcId(bomRfc.rfc.getRfcId());
                                    }
                                    if (toCiRfc.getRfcId() > 0) {
                                        managedVia.setToRfcId(toCiRfc.getRfcId());
                                    }
                                    //managedVia.setValidated(true);
                                    createBomRelationRfc(managedVia,existingRels,releaseId);
                                    relRfcGoids.add(managedVia.getRelationGoid());
                                    //cmRfcMrgProcessor.upsertRfcRelationNoCheck(managedVia, user, "dj");
                                }
                            }
                        }
                    }
                }
                //}
            }
        }
    }
};
/**
 * Creates "bom.SecuredBy" relation rfcs: for every manifest "SecuredBy" relation, a
 * bom-level relation is created between each bom node of the source manifest ci and
 * each bom node of the securing manifest ci.
 *
 * @param mfstCiRels   platform "Requires" relations whose to-cis are the manifest components
 * @param bomsMap      manifest ciId -> bom nodes produced for it
 * @param nsPath       bom namespace path
 * @param user         user recorded on created rfcs
 * @param existingRels cache of existing/open relations
 * @param releaseId    open release id, or null
 */
private void processSecuredByRels(List<CmsCIRelation> mfstCiRels, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels, Long releaseId) {
    long nsId = trUtil.verifyAndCreateNS(nsPath);
    for (CmsCIRelation mfstCiRel : mfstCiRels) {
        CmsCI mfstCi = mfstCiRel.getToCi();
        List<CmsCIRelation> mfstSecuredByRels = cmProcessor.getFromCIRelationsNaked(mfstCi.getCiId(), null, "SecuredBy", null);
        for (CmsCIRelation mfstSecuredByRel : mfstSecuredByRels) {
            // cross product: every bom instance of the source x every bom instance of the target
            for (BomRfc fromBomRfc : bomsMap.get(mfstCi.getCiId())) {
                for (BomRfc toBomRfc : bomsMap.get(mfstSecuredByRel.getToCiId())) {
                    CmsRfcRelation securedBy = bootstrapRelationRfc(fromBomRfc.rfc.getCiId(), toBomRfc.rfc.getCiId(), "bom.SecuredBy", nsPath, existingRels);
                    securedBy.setComments(generateRelComments(fromBomRfc.rfc.getCiName(), fromBomRfc.rfc.getCiClassName(), toBomRfc.rfc.getCiName(), toBomRfc.rfc.getCiClassName()));
                    securedBy.setCreatedBy(user);
                    securedBy.setUpdatedBy(user);
                    securedBy.setNsId(nsId);
                    validateRelRfc(securedBy, fromBomRfc.rfc.getCiClassId(), toBomRfc.rfc.getCiClassId());
                    // wire open rfc ids so the relation rfc references them
                    if (fromBomRfc.rfc.getRfcId() > 0) {
                        securedBy.setFromRfcId(fromBomRfc.rfc.getRfcId());
                    }
                    if (toBomRfc.rfc.getRfcId() > 0 ) {
                        securedBy.setToRfcId(toBomRfc.rfc.getRfcId());
                    }
                    createBomRelationRfc(securedBy, existingRels, releaseId);
                    //cmRfcMrgProcessor.upsertRelationRfc(securedBy, user, "dj");
                }
            }
        }
    }
};
/**
 * Walks the bom DependsOn adjacency index from {@code startCiId} along the given
 * manifest class path (consumed front-to-back) and returns the bom ciIds reached at
 * the end of the path. Since the graph can fan out, all branches are followed.
 *
 * @param startCiId       bom ci to start from
 * @param path            remaining manifest class names to traverse (mutated via poll)
 * @param targetMfstCiId  manifest ci at the end of the path (currently unused here)
 * @param dependsOnMap    fromCiId -> (toClassName -> toCiIds) adjacency index
 */
private List<Long> getLeafsByPath(long startCiId, LinkedList<String> path, long targetMfstCiId, Map<Long, Map<String,List<Long>>> dependsOnMap) {
    List<Long> result = new ArrayList<Long>();
    if (path.isEmpty()) {
        // path exhausted on a longer branch - nothing more to collect here
        return result;
    }
    String bomClass = "bom." + trUtil.getLongShortClazzName(path.poll());
    List<Long> nextHops = new ArrayList<Long>();
    Map<String, List<Long>> byClass = dependsOnMap.get(startCiId);
    if (byClass != null && byClass.containsKey(bomClass)) {
        nextHops.addAll(byClass.get(bomClass));
    }
    if (path.isEmpty()) {
        // last segment consumed: these hops are the targets
        result.addAll(nextHops);
    } else {
        // keep descending; copy the path so sibling branches are unaffected
        for (long toCiId : nextHops) {
            result.addAll(getLeafsByPath(toCiId, new LinkedList<String>(path), targetMfstCiId, dependsOnMap));
        }
    }
    return result;
}
/**
 * Depth-first search of the manifest DependsOn graph from {@code fromId} to
 * {@code endId}; returns the class names along the first path found (excluding the
 * start ci's class), or an empty list when no path exists.
 */
private List<String> getDpOnPath(long fromId, long endId) {
    List<String> pathClasses = new ArrayList<String>();
    for (CmsCIRelation dponRel : cmProcessor.getFromCIRelations(fromId, null, "DependsOn", null)) {
        CmsCI toCi = dponRel.getToCi();
        if (toCi.getCiId() == endId) {
            // direct hit - path is just this hop
            pathClasses.add(toCi.getCiClassName());
            return pathClasses;
        }
        List<String> subPath = getDpOnPath(dponRel.getToCiId(), endId);
        if (!subPath.isEmpty()) {
            // found the target deeper down - prepend this hop
            pathClasses.add(toCi.getCiClassName());
            pathClasses.addAll(subPath);
            return pathClasses;
        }
    }
    return pathClasses;
}
/** Groups the bom nodes by the manifest ciId they were generated from. */
private Map<Long, List<BomRfc>> buildMfstToBomRfcMap(List<BomRfc> boms) {
    Map<Long, List<BomRfc>> byManifestId = new HashMap<Long, List<BomRfc>>();
    for (BomRfc bom : boms) {
        List<BomRfc> bucket = byManifestId.get(bom.manifestCiId);
        if (bucket == null) {
            bucket = new ArrayList<BomRfc>();
            byManifestId.put(bom.manifestCiId, bucket);
        }
        bucket.add(bom);
    }
    return byManifestId;
}
/**
 * Recursively expands one bom node into the full bom graph for its edge. Walks the
 * manifest DependsOn relations in both directions: "from" relations clone the target
 * into as many bom instances as the scale ("current", optionally adjusted by the
 * binding's "pct_scale" and the relation's "pct_dpmt") requires; "to" relations link
 * back to (or create) the parent bom instance for this edge. Relations flagged with
 * the converge attribute collapse to a single shared instance on edge 1.
 *
 * @param node                 bom node being expanded (marked isProcessed when done)
 * @param namesMap             name-generation state passed through to getName
 * @param binding              platform->cloud binding (scale attrs, target ci id)
 * @param mfstIdEdge2nodeId    "&lt;manifestCiId&gt;-&lt;edgeNum&gt;" -> bom node ids already created
 * @param manifestDependsOnRels cache of manifest DependsOn relations per manifest ciId
 * @param edgeNum              edge (clone index) this node belongs to
 * @param usePercent           whether "pct_dpmt" should limit the number of clones
 * @param recursionDepth       cycle guard counter
 * @return the newly created bom nodes (excluding {@code node} itself)
 * @throws TransistorException on suspected cycles or edge-count blowup
 */
private List<BomRfc> processNode(BomRfc node, Map<String, Integer> namesMap, CmsCIRelation binding, Map<String, List<String>> mfstIdEdge2nodeId, Map<Long,Map<String,List<CmsCIRelation>>> manifestDependsOnRels, int edgeNum, boolean usePercent, int recursionDepth){
    if (recursionDepth >= MAX_RECUSION_DEPTH) {
        String err = "Circular dependency detected, (level - " + recursionDepth + "),\n please check the platform diagram for " + extractPlatformNameFromNsPath(node.mfstCi.getNsPath());
        logger.error(err);
        throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
    }
    if (edgeNum >= MAX_NUM_OF_EDGES) {
        String err = "Max number of edges is reached - " + edgeNum + "\n please check the platform diagram for " + extractPlatformNameFromNsPath(node.mfstCi.getNsPath());
        logger.error(err);
        throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
    }
    logger.info("working on " + node.ciName + "; recursion depth - " + recursionDepth);
    List<BomRfc> newBoms = new ArrayList<BomRfc>();
    if (node.isProcessed) {
        return newBoms;
    }
    List<CmsCIRelation> mfstFromRels = null;
    List<CmsCIRelation> mfstToRels = null;
    // lazily load and cache the manifest DependsOn relations for this manifest ci
    if (!manifestDependsOnRels.containsKey(node.manifestCiId)) {
        Map<String,List<CmsCIRelation>> rels = new HashMap<String,List<CmsCIRelation>>();
        rels.put("from", cmProcessor.getFromCIRelations(node.manifestCiId, "manifest.DependsOn", null));
        rels.put("to", cmProcessor.getToCIRelations(node.manifestCiId, "manifest.DependsOn", null));
        manifestDependsOnRels.put(node.manifestCiId, rels);
    }
    mfstFromRels = manifestDependsOnRels.get(node.manifestCiId).get("from");
    mfstToRels = manifestDependsOnRels.get(node.manifestCiId).get("to");;
    //logger.info("got " + mfstFromRels.size() + " 'from' relations");
    //logger.info("got " + mfstToRels.size() + " 'to' relations");
    for (CmsCIRelation fromRel : mfstFromRels) {
        int numEdges = 0;
        int percent = 100;
        // "current" is the desired instance count, optionally scaled by the binding
        int current = Integer.valueOf(fromRel.getAttribute("current").getDfValue());
        if (current >1 && binding.getAttributes().containsKey("pct_scale") && binding.getAttribute("pct_scale") != null) {
            int pctScale = Integer.valueOf(binding.getAttribute("pct_scale").getDjValue());
            current = (int)Math.ceil(current*(pctScale/100.0)) ;
        }
        if (usePercent && fromRel.getAttribute("pct_dpmt") != null) {
            percent = Integer.valueOf(fromRel.getAttribute("pct_dpmt").getDjValue());
            numEdges = (int)Math.floor(current*(percent/100.0)) ;
        } else {
            numEdges = current;
        }
        int edgeNumLocal = edgeNum;
        //special case if the relation marked as converge
        // converged dependencies collapse into a single shared instance on edge 1
        if (fromRel.getAttribute(CONVERGE_RELATION_ATTRIBUTE) != null
                && Boolean.valueOf(fromRel.getAttribute(CONVERGE_RELATION_ATTRIBUTE).getDfValue())) {
            edgeNumLocal = 1;
            numEdges = 1;
        }
        String key = String.valueOf(fromRel.getToCi().getCiId()) + "-" + edgeNumLocal;
        if (!mfstIdEdge2nodeId.containsKey(key)
            || numEdges > 1) {
            // clone the dependency target: one new bom node per missing edge instance
            //for (int i=node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size()+1; i<=numEdges; i++) {
            for (int i=node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size() + 1 + ((edgeNumLocal-1) * numEdges); i<=numEdges + ((edgeNumLocal-1) * numEdges); i++) {
                int newEdgeNum = (i > edgeNumLocal) ? i : edgeNumLocal;
                BomRfc newBom = bootstrapNewBom(fromRel.getToCi(), namesMap, binding.getToCiId(), newEdgeNum);
                BomLink link = new BomLink();
                link.fromNodeId = node.nodeId;
                link.fromMfstCiId = node.manifestCiId;
                link.toNodeId = newBom.nodeId;
                link.toMfstCiId = newBom.manifestCiId;
                node.fromLinks.add(link);
                newBom.toLinks.add(link);
                newBoms.add(newBom);
                key = String.valueOf(newBom.manifestCiId)+ "-" + newEdgeNum;
                if (!mfstIdEdge2nodeId.containsKey(key)) mfstIdEdge2nodeId.put(key, new ArrayList<String>());
                mfstIdEdge2nodeId.get(key).add(newBom.nodeId);
                newBoms.addAll(processNode(newBom, namesMap, binding, mfstIdEdge2nodeId, manifestDependsOnRels, newEdgeNum, usePercent, recursionDepth + 1));
            }
        } else {
            // target instance(s) already exist for this edge - just link to them
            for (String toNodeId : mfstIdEdge2nodeId.get(key)) {
                if (node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size() == 0 ) {
                    BomLink link = new BomLink();
                    link.fromNodeId = node.nodeId;
                    link.fromMfstCiId = node.manifestCiId;
                    link.toNodeId = toNodeId;
                    link.toMfstCiId = fromRel.getToCi().getCiId();
                    node.fromLinks.add(link);
                }
            }
        }
    }
    for (CmsCIRelation toRel : mfstToRels) {
        String key = String.valueOf(toRel.getFromCi().getCiId()) + "-" + edgeNum;
        if (!mfstIdEdge2nodeId.containsKey(key)) {
            // no parent instance for this edge yet - create and expand it
            mfstIdEdge2nodeId.put(key, new ArrayList<String>());
            if (node.getExisitngToLinks(toRel.getFromCi().getCiId()).size() == 0
                || ((toRel.getAttribute(CONVERGE_RELATION_ATTRIBUTE) != null
                && Boolean.valueOf(toRel.getAttribute(CONVERGE_RELATION_ATTRIBUTE).getDfValue()))
                && node.getExisitngToLinks(toRel.getFromCi().getCiId()
                        + getName(toRel.getFromCi().getCiName(), namesMap, binding.getToCiId(), edgeNum)) == null)) {
                BomRfc newBom = bootstrapNewBom(toRel.getFromCi(), namesMap, binding.getToCiId(), edgeNum);
                BomLink link = new BomLink();
                link.toNodeId = node.nodeId;
                link.toMfstCiId = node.manifestCiId;
                link.fromNodeId = newBom.nodeId;
                link.fromMfstCiId = newBom.manifestCiId;
                node.toLinks.add(link);
                newBom.fromLinks.add(link);
                newBoms.add(newBom);
                mfstIdEdge2nodeId.get(String.valueOf(newBom.manifestCiId)+ "-" + edgeNum).add(newBom.nodeId);
                newBoms.addAll(processNode(newBom, namesMap, binding, mfstIdEdge2nodeId, manifestDependsOnRels, edgeNum, usePercent, recursionDepth + 1));
            }
        } else {
            // parent instance(s) already exist for this edge - just link to them
            for (String fromNodeId : mfstIdEdge2nodeId.get(key)) {
                if (node.getExisitngToLinks(toRel.getFromCi().getCiId()).size() == 0 ) {
                    BomLink link = new BomLink();
                    link.toNodeId = node.nodeId;
                    link.toMfstCiId = node.manifestCiId;
                    link.fromNodeId = fromNodeId;
                    link.fromMfstCiId = toRel.getFromCi().getCiId();
                    node.toLinks.add(link);
                }
            }
        }
    }
    node.isProcessed = true;
    return newBoms;
}
/**
 * Creates an in-memory bom node for the given manifest ci, edge and binding; the
 * nodeId is the manifest ciId concatenated with the generated bom name.
 */
private BomRfc bootstrapNewBom(CmsCI ci, Map<String, Integer> namesMap, long bindingId, int edgeNum) {
    BomRfc bom = new BomRfc();
    bom.mfstCi = ci;
    bom.manifestCiId = ci.getCiId();
    bom.ciName = getName(ci.getCiName(), namesMap, bindingId, edgeNum);
    bom.nodeId = bom.manifestCiId + bom.ciName;
    return bom;
}
/**
 * Builds the bom ci name as "&lt;base&gt;-&lt;bindingId&gt;-&lt;edgeNum&gt;".
 * The namesMap parameter is not used by the current naming scheme but is kept
 * so caller signatures stay stable.
 */
private String getName(String base, Map<String, Integer> namesMap, long bindingId, int edgeNum) {
    StringBuilder name = new StringBuilder(base);
    name.append('-').append(bindingId).append('-').append(edgeNum);
    return name.toString();
}
/**
 * Copies the manifest ci's name, comments and attribute values onto the bom rfc.
 * Only attributes known to the target bom class (mdAttrs) with a non-null df value
 * are copied; existing rfc attributes are overwritten in place, others are added.
 * The checkExpression flag is currently not consulted here.
 */
private void applyCiToRfc(CmsRfcCI newRfc, BomRfc bom, Map<String, CmsClazzAttribute> mdAttrs, boolean checkExpression) {
    newRfc.setCiName(bom.ciName);
    newRfc.setComments(bom.mfstCi.getComments());
    for (CmsCIAttribute mfstAttr : bom.mfstCi.getAttributes().values()) {
        String attrName = mfstAttr.getAttributeName();
        if (!mdAttrs.containsKey(attrName) || mfstAttr.getDfValue() == null) {
            continue; // unknown to the bom class, or no deployed value to copy
        }
        CmsRfcAttribute existing = newRfc.getAttribute(attrName);
        if (existing != null) {
            existing.setNewValue(mfstAttr.getDfValue());
            existing.setComments(mfstAttr.getComments());
        } else {
            CmsRfcAttribute added = new CmsRfcAttribute();
            added.setAttributeId(mdAttrs.get(attrName).getAttributeId());
            added.setAttributeName(attrName);
            added.setNewValue(mfstAttr.getDfValue());
            newRfc.addAttribute(added);
        }
    }
}
/**
 * Links a freshly bootstrapped rfc to its existing identifiers: an open rfc takes
 * precedence (its ciId/rfcId/releaseId are reused so it is updated, not duplicated);
 * otherwise an existing bom ci supplies the ciId and current state.
 */
private void setCiId(CmsRfcCI rfc, CmsRfcCI existingRfc, CmsCI existingBomCi) {
    if (existingRfc != null) {
        rfc.setCiId(existingRfc.getCiId());
        rfc.setRfcId(existingRfc.getRfcId());
        rfc.setReleaseId(existingRfc.getReleaseId());
        return;
    }
    if (existingBomCi != null) {
        rfc.setCiId(existingBomCi.getCiId());
        rfc.setCiState(existingBomCi.getCiState());
    }
}
/**
 * Builds a new relation rfc of the given type between two cis: seeds attribute
 * defaults from the relation metadata, computes the relation goid, and wires up
 * ciRelationId/rfcId from any existing relation or open rfc.
 */
private CmsRfcRelation bootstrapRelationRfc(long fromCiId, long toCiId, String relName, String nsPath, ExistingRels existingRels) {
    CmsRelation relMd = mdProcessor.getRelation(relName);

    CmsRfcRelation relRfc = new CmsRfcRelation();
    relRfc.setNsPath(nsPath);
    relRfc.setRelationId(relMd.getRelationId());
    relRfc.setRelationName(relMd.getRelationName());

    // seed defaults from the relation metadata definition
    for (CmsRelationAttribute mdAttr : relMd.getMdAttributes()) {
        if (mdAttr.getDefaultValue() == null) {
            continue;
        }
        CmsRfcAttribute seeded = new CmsRfcAttribute();
        seeded.setAttributeId(mdAttr.getAttributeId());
        seeded.setAttributeName(mdAttr.getAttributeName());
        seeded.setNewValue(mdAttr.getDefaultValue());
        relRfc.addAttribute(seeded);
    }

    relRfc.setFromCiId(fromCiId);
    relRfc.setToCiId(toCiId);
    relRfc.setRelationGoid(String.valueOf(relRfc.getFromCiId()) + '-' + String.valueOf(relRfc.getRelationId()) + '-' + String.valueOf(relRfc.getToCiId()));
    setCiRelationId(relRfc, existingRels.getOpenRelRfc(relName, fromCiId, toCiId), existingRels.getExistingRel(relName, fromCiId, toCiId));
    return relRfc;
}
/**
 * Builds a new relation rfc like {@link #bootstrapRelationRfc}, but with explicit
 * attribute overrides: a value from {@code attrs} wins over the metadata default.
 * Note: unlike bootstrapRelationRfc, the relation goid is not set here.
 */
private CmsRfcRelation bootstrapRelationRfcWithAttributes(long fromCiId, long toCiId, String relName, String nsPath, Map<String,String> attrs, ExistingRels existingRels) {
    CmsRelation relMd = mdProcessor.getRelation(relName);

    CmsRfcRelation relRfc = new CmsRfcRelation();
    relRfc.setNsPath(nsPath);
    relRfc.setRelationId(relMd.getRelationId());
    relRfc.setRelationName(relMd.getRelationName());

    // seed each attribute from the override map if present, else the metadata default
    for (CmsRelationAttribute mdAttr : relMd.getMdAttributes()) {
        String attrName = mdAttr.getAttributeName();
        boolean overridden = attrs.containsKey(attrName);
        if (!overridden && mdAttr.getDefaultValue() == null) {
            continue; // nothing to seed for this attribute
        }
        CmsRfcAttribute rfcAttr = new CmsRfcAttribute();
        rfcAttr.setAttributeId(mdAttr.getAttributeId());
        rfcAttr.setAttributeName(attrName);
        rfcAttr.setNewValue(overridden ? attrs.get(attrName) : mdAttr.getDefaultValue());
        relRfc.addAttribute(rfcAttr);
    }

    relRfc.setFromCiId(fromCiId);
    relRfc.setToCiId(toCiId);
    setCiRelationId(relRfc, existingRels.getOpenRelRfc(relName, fromCiId, toCiId), existingRels.getExistingRel(relName, fromCiId, toCiId));
    return relRfc;
}
/**
 * Links a freshly bootstrapped relation rfc to existing identifiers: an open rfc
 * takes precedence (ciRelationId/rfcId/releaseId reused so it is updated in place);
 * otherwise an existing relation supplies the ciRelationId.
 */
private void setCiRelationId(CmsRfcRelation rfc, CmsRfcRelation existingRfc, CmsCIRelation existingRel) {
    if (existingRfc != null) {
        rfc.setCiRelationId(existingRfc.getCiRelationId());
        rfc.setRfcId(existingRfc.getRfcId());
        rfc.setReleaseId(existingRfc.getReleaseId());
        return;
    }
    if (existingRel != null) {
        rfc.setCiRelationId(existingRel.getCiRelationId());
    }
}
/**
 * Renders the platform portion of a namespace path for error messages as
 * "name(version)", taken from the last two path segments.
 */
private String extractPlatformNameFromNsPath(String ns) {
    String[] parts = ns.split("/");
    int last = parts.length - 1;
    return parts[last - 1] + "(" + parts[last] + ")";
}
/**
 * In-memory node of the bom expansion graph: one bom instance realized from a
 * manifest ci for a particular binding/edge, plus its dependency links.
 */
private class BomRfc {
    long manifestCiId;            // manifest ci this bom instance realizes
    CmsCI mfstCi;                 // the manifest ci itself
    int execOrder = 0;            // deployment execution order (raised by processOrder)
    String ciName;                // generated bom ci name (base-bindingId-edgeNum)
    String nodeId;                // graph key: manifestCiId + ciName
    CmsRfcCI rfc;                 // rfc created for this node, if any
    boolean isProcessed = false;  // guards against re-expanding this node
    List<BomLink> fromLinks = new ArrayList<BomLink>(); // edges where this node depends on others
    List<BomLink> toLinks = new ArrayList<BomLink>();   // edges where others depend on this node

    /** Returns the outgoing links that already point at the given manifest ci. */
    public List<BomLink> getExisitngFromLinks(long toMfstCiId) {
        List<BomLink> matches = new ArrayList<BomLink>();
        for (BomLink candidate : fromLinks) {
            if (candidate.toMfstCiId == toMfstCiId) {
                matches.add(candidate);
            }
        }
        return matches;
    }

    /** Returns the incoming links that already originate from the given manifest ci. */
    public List<BomLink> getExisitngToLinks(long fromMfstCiId) {
        List<BomLink> matches = new ArrayList<BomLink>();
        for (BomLink candidate : toLinks) {
            if (candidate.fromMfstCiId == fromMfstCiId) {
                matches.add(candidate);
            }
        }
        return matches;
    }

    /** Returns the incoming link originating from the given node id, or null if absent. */
    public BomLink getExisitngToLinks(String fromNodeId) {
        for (BomLink candidate : toLinks) {
            if (candidate.fromNodeId.equals(fromNodeId)) {
                return candidate;
            }
        }
        return null;
    }
}
/**
 * Directed dependency edge between two BomRfc nodes in the in-memory bom graph
 * (from depends on to).
 */
private class BomLink {
    // node id and manifest ciId of the dependent (source) side
    String fromNodeId;
    long fromMfstCiId;
    // manifest ciId and node id of the dependency (target) side
    long toMfstCiId;
    String toNodeId;
}
/**
 * Per-namespace cache of relations, loaded once up front so the bom generation pass
 * avoids repeated lookups. Holds both committed relations and open relation rfcs,
 * each indexed by relation name and then by "fromCiId:toCiId".
 */
private class ExistingRels {
    // relationName -> ("fromCiId:toCiId" -> relation)
    private Map<String, Map<String,CmsCIRelation>> existingRels;
    // relationName -> ("fromCiId:toCiId" -> open rfc)
    private Map<String, Map<String,CmsRfcRelation>> openRelRfcs;

    // eagerly loads both maps for the given namespace
    ExistingRels(String nsPath) {
        this.existingRels = getExistingRelations(nsPath);
        this.openRelRfcs = getOpenRelationsRfcs(nsPath);
    }

    /** Returns the committed relation for the given endpoints, or null if absent. */
    protected CmsCIRelation getExistingRel(String relName, long fromCiId, long toCiId) {
        if (existingRels.containsKey(relName)) {
            return existingRels.get(relName).get(fromCiId + ":" + toCiId);
        }
        return null;
    }

    /** Returns all committed relations of the given type (empty list when none). */
    protected Collection<CmsCIRelation> getExistingRel(String relName) {
        if (existingRels.containsKey(relName)) {
            return existingRels.get(relName).values();
        }
        return new ArrayList<CmsCIRelation>(0);
    }

    /*
    protected Collection<CmsRfcRelation> getExistingRelRfc(String relName) {
        if (openRelRfcs.containsKey(relName)) {
            return openRelRfcs.get(relName).values();
        }
        return new ArrayList<CmsRfcRelation>(0);
    }
    */

    /** Registers a newly created relation rfc in the cache so later lookups see it. */
    protected void addRelRfc(CmsRfcRelation relRfc) {
        String localKey = relRfc.getFromCiId() + ":" + relRfc.getToCiId();
        if (!openRelRfcs.containsKey(relRfc.getRelationName())) {
            openRelRfcs.put(relRfc.getRelationName(), new HashMap<String,CmsRfcRelation>());
        }
        openRelRfcs.get(relRfc.getRelationName()).put(localKey, relRfc);
    }

    /** Returns the open relation rfc for the given endpoints, or null if absent. */
    protected CmsRfcRelation getOpenRelRfc(String relName, long fromCiId, long toCiId) {
        if (openRelRfcs.containsKey(relName)) {
            return openRelRfcs.get(relName).get(fromCiId + ":" + toCiId);
        }
        return null;
    }
}
}
|
src/main/java/com/oneops/transistor/service/BomRfcBulkProcessor.java
|
/*******************************************************************************
*
* Copyright 2015 Walmart, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*******************************************************************************/
package com.oneops.transistor.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.oneops.cms.cm.domain.CmsCI;
import com.oneops.cms.cm.domain.CmsCIAttribute;
import com.oneops.cms.cm.domain.CmsCIRelation;
import com.oneops.cms.cm.domain.CmsCIRelationAttribute;
import com.oneops.cms.cm.domain.CmsLink;
import com.oneops.cms.cm.service.CmsCmProcessor;
import com.oneops.cms.dj.domain.CmsRfcAttribute;
import com.oneops.cms.dj.domain.CmsRfcCI;
import com.oneops.cms.dj.domain.CmsRfcRelation;
import com.oneops.cms.dj.service.CmsCmRfcMrgProcessor;
import com.oneops.cms.dj.service.CmsRfcProcessor;
import com.oneops.cms.exceptions.DJException;
import com.oneops.cms.md.domain.CmsClazz;
import com.oneops.cms.md.domain.CmsClazzAttribute;
import com.oneops.cms.md.domain.CmsRelation;
import com.oneops.cms.md.domain.CmsRelationAttribute;
import com.oneops.cms.md.service.CmsMdProcessor;
import com.oneops.cms.util.CIValidationResult;
import com.oneops.cms.util.CmsDJValidator;
import com.oneops.cms.util.CmsError;
import com.oneops.cms.util.CmsUtil;
import com.oneops.transistor.exceptions.TransistorException;
public class BomRfcBulkProcessor {
static Logger logger = Logger.getLogger(BomRfcBulkProcessor.class);
private static final Map<String, Integer> priorityMap = new HashMap<String, Integer>();
static {
//priorityMap.put("Compute", 2);
//priorityMap.put("Storage", 2);
priorityMap.put("Keypair", 1);
}
private static final int priorityMax = 1;
private static final String BOM_CLOUD_RELATION_NAME = "base.DeployedTo";
private static final String BOM_REALIZED_RELATION_NAME = "base.RealizedAs";
private static final String BOM_DEPENDS_ON_RELATION_NAME = "bom.DependsOn";
private static final String BOM_MANAGED_VIA_RELATION_NAME = "bom.ManagedVia";
private static final int MAX_RECUSION_DEPTH = Integer.valueOf(System.getProperty("com.oneops.transistor.MaxRecursion", "50"));
private static final int MAX_NUM_OF_EDGES = Integer.valueOf(System.getProperty("com.oneops.transistor.MaxEdges", "100000"));
private CmsCmProcessor cmProcessor;
private CmsMdProcessor mdProcessor;
private CmsRfcProcessor rfcProcessor;
private CmsCmRfcMrgProcessor cmRfcMrgProcessor;
private CmsDJValidator djValidator;
private Gson gson = new Gson();
//private CmsNsManager nsManager;
private TransUtil trUtil;
private CmsUtil cmsUtil;
/** Setter injection: utility for variable interpolation and ci/rfc helpers. */
public void setCmsUtil(CmsUtil cmsUtil) {
    this.cmsUtil = cmsUtil;
}
/** Setter injection: transistor-specific utility (namespace creation, ci cloning, class names). */
public void setTrUtil(TransUtil trUtil) {
    this.trUtil = trUtil;
}
/** Setter injection: processor for committed configuration items and relations. */
public void setCmProcessor(CmsCmProcessor cmProcessor) {
    this.cmProcessor = cmProcessor;
}
/** Setter injection: processor for class/relation metadata. */
public void setMdProcessor(CmsMdProcessor mdProcessor) {
    this.mdProcessor = mdProcessor;
}
/** Setter injection: low-level rfc persistence processor. */
public void setRfcProcessor(CmsRfcProcessor rfcProcessor) {
    this.rfcProcessor = rfcProcessor;
}
/** Setter injection: processor that merges committed ci state with open rfcs. */
public void setCmRfcMrgProcessor(CmsCmRfcMrgProcessor cmRfcMrgProcessor) {
    this.cmRfcMrgProcessor = cmRfcMrgProcessor;
}
/** Setter injection: validator used for rfc attribute/relation equality checks. */
public void setDjValidator(CmsDJValidator djValidator) {
    this.djValidator = djValidator;
}
/**
 * Convenience overload of {@link #processManifestPlatform(CmsCI, CmsCIRelation, String, int, Map, Map, String, boolean, boolean)}
 * that assumes the platform namespace already exists (createPlatNs = false).
 *
 * @return the max exec order used, so the caller can schedule subsequent platforms after it
 */
public int processManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, Map<String,String> globalVars, Map<String,String> cloudVars,String userId, boolean usePercent){
    return processManifestPlatform(platformCi, bindingRel, nsPath, startExecOrder, globalVars, cloudVars, userId, false, usePercent);
}
/**
 * Generates the full bom-level rfc graph for one manifest platform bound to a single cloud:
 * walks the manifest component graph, creates/updates bom ci and relation rfcs, wires
 * ManagedVia/SecuredBy/entry-point relations, and (for full deployments) deletes obsolete boms.
 *
 * @param platformCi     the manifest platform ci to realize
 * @param bindingRel     the platform-to-cloud binding relation (its toCi is the cloud)
 * @param nsPath         bom namespace path; extended with platform name/version when createPlatNs is true
 * @param startExecOrder first exec order available to this platform's rfcs
 * @param createPlatNs   whether to create the platform sub-namespace first
 * @param usePercent     honor partial (pct_dpmt) deployment semantics
 * @return the max exec order consumed, for sequencing subsequent platforms
 */
public int processManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, Map<String,String> globalVars, Map<String,String> cloudVars, String userId, boolean createPlatNs, boolean usePercent){
    // Exec orders [1..priorityMax] are reserved for priority classes (see priorityMap).
    if (startExecOrder <= priorityMax) startExecOrder = priorityMax+1;
    long startingTime = System.currentTimeMillis();
    int maxExecOrder = 0;
    if (createPlatNs) {
        // Iaas platforms have no version segment in their namespace.
        if (platformCi.getCiClassName().equals("manifest.Iaas")) {
            nsPath = nsPath + "/" + platformCi.getCiName();
        } else {
            nsPath = nsPath + "/" + platformCi.getCiName() + "/" + platformCi.getAttribute("major_version").getDjValue();
        }
        trUtil.verifyAndCreateNS(nsPath);
    }
    logger.info(nsPath + " >>> Start working on " + platformCi.getCiName() + ", cloud - " + bindingRel.getToCi().getCiName());
    Map<String,String> localVars = cmsUtil.getLocalVars(platformCi);
    List<CmsCIRelation> mfstPlatComponents = cmProcessor.getFromCIRelations(platformCi.getCiId(), null, "Requires", null);
    if (mfstPlatComponents.size() > 0) {
        String manifestNs = mfstPlatComponents.get(0).getNsPath();
        boolean isPartial = isPartialDeployment(manifestNs);
        List<BomRfc> boms = new ArrayList<BomRfc>();
        Map<String, List<String>> mfstId2nodeId = new HashMap<String,List<String>>();
        CmsCI startingPoint = mfstPlatComponents.get(0).getToCi();
        Map<String, Integer> namesMap = new HashMap<String, Integer>();
        Map<Long,Map<String,List<CmsCIRelation>>> manifestDependsOnRels = new HashMap<Long,Map<String,List<CmsCIRelation>>>();
        // Expand the graph from each unprocessed component until every manifest ci is covered;
        // disconnected components each get their own traversal via getStartingPoint.
        while (startingPoint != null) {
            BomRfc newBom = bootstrapNewBom(startingPoint, namesMap, bindingRel.getToCiId(), 1);
            boms.add(newBom);
            mfstId2nodeId.put(String.valueOf(newBom.manifestCiId) + "-" + 1, new ArrayList<String>(Arrays.asList(newBom.nodeId)));
            boms.addAll(processNode(newBom, namesMap, bindingRel, mfstId2nodeId, manifestDependsOnRels, 1, usePercent, 1));
            startingPoint = getStartingPoint(mfstPlatComponents, boms);
        }
        // this is needed to work around ibatis
        // if there is no any updates within current transaction
        // ibatis would not return a new object as query result but instead a ref to the previously created one
        // if it was modified outside - the changes will not be reset
        for(BomRfc bom : boms) {
            bom.mfstCi = trUtil.cloneCI(bom.mfstCi);
        }
        // interpolate cloud/global/local variables into the cloned manifest cis
        processVars(boms, cloudVars, globalVars, localVars);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", starting creating rfcs");
        long bomCreationStartTime = System.currentTimeMillis();
        Long releaseId = null;
        // Snapshot of current relation/ci/rfc state so we can diff instead of blindly inserting.
        ExistingRels existingRels = new ExistingRels(nsPath);
        Map<String, CmsCI> existingCIs = getExistingCis(bindingRel.getToCiId(), nsPath);
        Map<String, CmsRfcCI> existingRFCs = getOpenRFCs(nsPath);
        maxExecOrder = createBomRfcsAndRels(boms, nsPath, bindingRel, startExecOrder, isPartial, userId, existingRels, existingCIs, existingRFCs, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with main RFCs and relations, time spent - " + (System.currentTimeMillis() - bomCreationStartTime));
        Map<Long, List<BomRfc>> bomsMap = buildMfstToBomRfcMap(boms);
        long mngviaStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing managed via");
        processManagedViaRels(mfstPlatComponents,bomsMap,nsPath, userId, existingRels, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with managed via, time spent - " + (System.currentTimeMillis() - mngviaStartTime));
        long secByStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing secured by");
        processSecuredByRels(mfstPlatComponents,bomsMap,nsPath, userId, existingRels, releaseId);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with secured by, time spent - " + (System.currentTimeMillis() - secByStartTime));
        long entryPointStartTime = System.currentTimeMillis();
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", processing entry point");
        processEntryPointRel(platformCi.getCiId(),bomsMap, nsPath, userId, existingRels);
        logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with entry point, time spent - " + (System.currentTimeMillis() - entryPointStartTime));
        // Obsolete bom cleanup is skipped for percent-scoped partial deployments —
        // components not in this slice must not be deleted.
        if (!usePercent || !isPartial) {
            if (maxExecOrder == 0) maxExecOrder++;
            long obsoleteStartTime = System.currentTimeMillis();
            logger.info(nsPath + " >>> " + platformCi.getCiName() + ", finding obsolete boms");
            maxExecOrder = findObsolete(boms, bindingRel, nsPath, maxExecOrder, existingCIs, userId, false);
            logger.info(nsPath + " >>> " + platformCi.getCiName() + ", Done with obsolete boms, time spent - " + (System.currentTimeMillis() - obsoleteStartTime));
        }
        if (logger.isDebugEnabled()) {
            for(BomRfc bom : boms) {
                logger.debug(bom.ciName + "::" + bom.execOrder);
            }
        }
        //help gc a little bit
        existingRels = null;
        existingCIs = null;
        existingRFCs = null;
    }
    long timeTook = System.currentTimeMillis() - startingTime;
    logger.info(nsPath + ">>> Done with " + platformCi.getCiName() + ", cloud - " + bindingRel.getToCi().getCiName() + ", Time to process - " + timeTook + " ms.");
    return maxExecOrder;
}
/**
 * A deployment is considered partial when any manifest DependsOn relation in the namespace
 * carries a pct_dpmt attribute other than "100".
 */
private boolean isPartialDeployment(String manifestNs) {
    for (CmsCIRelation dependsOn : cmProcessor.getCIRelationsNaked(manifestNs, "manifest.DependsOn", null, null, null)) {
        CmsCIRelationAttribute pct = dependsOn.getAttribute("pct_dpmt");
        if (pct != null && !"100".equals(pct.getDjValue())) {
            return true;
        }
    }
    return false;
}
/**
 * Finds the next manifest component that has not yet been expanded into a bom node,
 * or null once every component in the platform is covered. Used to pick up
 * disconnected subgraphs of the dependency graph.
 */
private CmsCI getStartingPoint(List<CmsCIRelation> mfstPlatComponents, List<BomRfc> boms) {
    Set<Long> coveredManifestIds = new HashSet<Long>();
    for (BomRfc processed : boms) {
        coveredManifestIds.add(processed.manifestCiId);
    }
    for (CmsCIRelation requiresRel : mfstPlatComponents) {
        if (!coveredManifestIds.contains(requiresRel.getToCiId())) {
            return requiresRel.getToCi();
        }
    }
    return null;
}
/**
 * Interpolates cloud, global, and local variables into every bom node's manifest ci.
 */
private void processVars(List<BomRfc> boms, Map<String,String> cloudVars, Map<String,String> globalVars, Map<String,String> localVars) {
    for (BomRfc bomNode : boms) {
        trUtil.processAllVars(bomNode.mfstCi, cloudVars, globalVars, localVars);
    }
}
/**
 * Identifies existing bom cis whose names no longer appear in the freshly generated graph
 * and schedules delete rfcs for them (via processObsolete).
 *
 * @return the max exec order consumed by the delete rfcs
 */
private int findObsolete(List<BomRfc> newBoms, CmsCIRelation bindingRel, String nsPath, int startingExecOrder, Map<String, CmsCI> existingCIs,String userId, boolean global) {
    logger.info(nsPath + " >>> finding cis to delete...");
    long startTime = System.currentTimeMillis();
    // Names present in the new bom graph; anything deployed but absent here is obsolete.
    Set<String> newBomNames = new HashSet<String>();
    for (BomRfc bom : newBoms) {
        newBomNames.add(bom.ciName);
    }
    Map<Long, CmsCI> obsoleteCisMap = new HashMap<Long, CmsCI>();
    for (CmsCI existingCi : existingCIs.values()) {
        if (!newBomNames.contains(existingCi.getCiName())) {
            logger.info("This ci should be deleted - " + existingCi.getCiName());
            obsoleteCisMap.put(existingCi.getCiId(), existingCi);
        }
    }
    logger.info(nsPath + " >>> creating delete rfcs and traversing strong relations...");
    int maxExecOrder = startingExecOrder;
    if (!obsoleteCisMap.isEmpty()) {
        maxExecOrder = processObsolete(newBoms, obsoleteCisMap, startingExecOrder, nsPath, userId, global);
    }
    logger.info(nsPath + " >>> Done creating delete rfcs, time taken:" + (System.currentTimeMillis() - startTime));
    return maxExecOrder;
}
/**
 * Creates delete rfcs for obsolete bom cis in reverse-dependency order and issues dummy
 * update rfcs (plus propagations) for surviving cis that depended on the deleted ones.
 *
 * Fixes vs. previous version:
 *  - the propagation-time accumulator used "totalPropagationTime += totalPropagationTime + delta",
 *    which doubled the running total on every hit; it now accumulates just the delta.
 *
 * @param bomRfcs         the freshly generated bom graph (used to locate dependents)
 * @param obsoleteCisMap  ciId -> ci for every ci to be deleted
 * @param startingExecOrder first exec order available for delete rfcs
 * @return the max exec order consumed
 */
private int processObsolete(List<BomRfc> bomRfcs, Map<Long, CmsCI> obsoleteCisMap, int startingExecOrder, String nsPath, String userId, boolean global){
    int maxExecOrder = startingExecOrder;
    // obsolete cis that are depended upon by another obsolete ci (must be deleted later)
    Set<Long> obsoleteToRelations = new HashSet<Long>();
    // obsolete fromCiId -> its DependsOn links to other obsolete cis
    Map<Long, List<CmsLink>> obsoleteFromRelations = new HashMap<Long, List<CmsLink>>();
    // DependsOn links from surviving cis into obsolete ones: those survivors need dummy updates
    List<CmsLink> dummyUpdateRels = new ArrayList<CmsLink>();
    List<CmsLink> dependsOnLinks = cmProcessor.getLinks(nsPath, "bom.DependsOn");
    // index DependsOn links by their target ci
    Map<Long, List<CmsLink>> toCiDependsOnMap = new HashMap<Long, List<CmsLink>>();
    for (CmsLink link : dependsOnLinks) {
        if (!toCiDependsOnMap.containsKey(link.getToCiId())) {
            toCiDependsOnMap.put(link.getToCiId(), new ArrayList<CmsLink>());
        }
        toCiDependsOnMap.get(link.getToCiId()).add(link);
    }
    for (Long ciId : obsoleteCisMap.keySet()) {
        if (toCiDependsOnMap.containsKey(ciId)) {
            for (CmsLink fromDependsOnCiIdLink : toCiDependsOnMap.get(ciId)) {
                if (obsoleteCisMap.containsKey(fromDependsOnCiIdLink.getFromCiId())) {
                    obsoleteToRelations.add(ciId);
                    if (!obsoleteFromRelations.containsKey(fromDependsOnCiIdLink.getFromCiId())) {
                        obsoleteFromRelations.put(fromDependsOnCiIdLink.getFromCiId(), new ArrayList<CmsLink>());
                    }
                    obsoleteFromRelations.get(fromDependsOnCiIdLink.getFromCiId()).add(fromDependsOnCiIdLink);
                } else {
                    dummyUpdateRels.add(fromDependsOnCiIdLink);
                }
            }
        }
    }
    // Compute delete exec orders: roots (nothing obsolete depends on them) go first,
    // then their obsolete dependencies, recursively.
    Map<Long, Integer> execOrder = new HashMap<Long, Integer>();
    for (Long ciId : obsoleteCisMap.keySet()) {
        if (!obsoleteToRelations.contains(ciId)) {
            execOrder.put(ciId, startingExecOrder);
            processObsoleteOrder(ciId, execOrder, obsoleteFromRelations);
        }
    }
    for (Long ciId : execOrder.keySet()) {
        int ciExecOrder = execOrder.get(ciId);
        CmsCI ci = obsoleteCisMap.get(ciId);
        String shortClazzName = trUtil.getShortClazzName(ci.getCiClassName());
        int actualExecOrder = ciExecOrder;
        // priority classes (e.g. Keypair) are deleted after everything else
        if (priorityMap.containsKey(shortClazzName)) {
            int priorityOrder = priorityMap.get(shortClazzName);
            actualExecOrder = startingExecOrder + obsoleteCisMap.size() + priorityMax - priorityOrder + 1;
        }
        createDeleteRfc(ci,actualExecOrder, userId);
        maxExecOrder = (ciExecOrder > maxExecOrder) ? ciExecOrder : maxExecOrder;
    }
    Map<Long, List<String>> manifestPropagations = new HashMap<Long, List<String>>();
    Set<Long> propagations = new HashSet<Long>();
    long totalPropagationTime = 0;
    // now lets submit dummy updates for survivors that depended on deleted cis
    Set<Long> dummyUpdates = new HashSet<Long>();
    if (dummyUpdateRels.size()>0) {
        for (CmsLink rel : dummyUpdateRels) {
            dummyUpdates.add(rel.getFromCiId());
            for (BomRfc bomRfc : bomRfcs) {
                if (bomRfc.rfc == null) {
                    logger.info("bom.rfc null for " + bomRfc.ciName + " nspath: " + nsPath);
                } else if (bomRfc.rfc.getCiId() == rel.getFromCiId()) {
                    long startTime = System.currentTimeMillis();
                    mapPropagations(bomRfc.manifestCiId, manifestPropagations);
                    if (manifestPropagations.get(bomRfc.manifestCiId).size() != 0) {
                        propagateUpdate(bomRfc.rfc.getCiId(), bomRfc.manifestCiId, manifestPropagations, userId, propagations);
                    }
                    long endTime = System.currentTimeMillis();
                    // accumulate only the elapsed delta (previous code doubled the total each time)
                    totalPropagationTime += (endTime - startTime);
                }
            }
        }
    }
    dummyUpdates.addAll(propagations);
    maxExecOrder = processDummyUpdates(dummyUpdates, bomRfcs, maxExecOrder);
    logger.info(nsPath + " >>> Total time taken by propagation in seconds: " + totalPropagationTime/1000.0);
    return maxExecOrder;
}
/**
 * Issues dummy-update rfcs for the given ci ids, batched by the exec order their bom
 * nodes were assigned, starting right after the current max exec order.
 *
 * Fix vs. previous version: the final loop unboxed keySet() ints and re-boxed them with
 * the deprecated {@code new Integer(order)} constructor for a second map lookup; it now
 * iterates entrySet() once, which is both idiomatic and avoids the redundant lookup.
 *
 * @param dummyUpdates ci ids that need a touch-only rfc
 * @param bomRfcs      bom graph used to resolve each ci's exec order
 * @param maxExecOrder current max exec order; incremented per batch
 * @return the new max exec order after all batches
 */
private int processDummyUpdates(Set<Long> dummyUpdates,
        List<BomRfc> bomRfcs, int maxExecOrder) {
    if (dummyUpdates.size() > 0) {
        // exec order -> ci ids to touch at that order (sorted ascending by TreeMap)
        TreeMap<Integer, List<Long>> dummyUpdateExecOrders = new TreeMap<Integer, List<Long>>();
        //now lets grab the execution orders from the bomRfcs for the CIs to be dummy updated.
        for (BomRfc bom : bomRfcs) {
            if (bom.rfc == null) {
                logger.info("rfc null for: " + bom.ciName);
                continue;
            }
            if (dummyUpdates.contains(bom.rfc.getCiId())) {
                List<Long> ciIds = dummyUpdateExecOrders.get(bom.execOrder);
                if (ciIds == null) {
                    ciIds = new ArrayList<Long>();
                    dummyUpdateExecOrders.put(bom.execOrder, ciIds);
                }
                ciIds.add(bom.rfc.getCiId());
            }
        }
        // Touch each batch with an exec order continuing past the current maximum.
        for (Map.Entry<Integer, List<Long>> batch : dummyUpdateExecOrders.entrySet()) {
            maxExecOrder++;
            for (long dummyUpdateCiId : batch.getValue()) {
                cmRfcMrgProcessor.createDummyUpdateRfc(dummyUpdateCiId, null, maxExecOrder, "oneops-transistor");
            }
        }
    }
    return maxExecOrder;
}
/**
 * Builds and persists a "delete" rfc mirroring the identity of an existing ci.
 *
 * @param ci        the ci to schedule for deletion
 * @param execOrder exec order at which the delete should run
 * @param userId    user recorded as creator/updater of the rfc
 */
private void createDeleteRfc(CmsCI ci, int execOrder, String userId)
{
    CmsRfcCI deleteRfc = new CmsRfcCI();
    // copy the ci identity so the rfc targets the exact existing instance
    deleteRfc.setCiId(ci.getCiId());
    deleteRfc.setCiClassId(ci.getCiClassId());
    deleteRfc.setCiClassName(ci.getCiClassName());
    deleteRfc.setCiGoid(ci.getCiGoid());
    deleteRfc.setCiName(ci.getCiName());
    deleteRfc.setNsId(ci.getNsId());
    deleteRfc.setNsPath(ci.getNsPath());
    // mark as a delete action
    deleteRfc.setRfcAction("delete");
    deleteRfc.setComments("deleting");
    deleteRfc.setExecOrder(execOrder);
    deleteRfc.setCreatedBy(userId);
    deleteRfc.setUpdatedBy(userId);
    rfcProcessor.createRfcCI(deleteRfc, userId);
}
/**
 * Recursively assigns delete exec orders: each obsolete dependency is scheduled at least
 * one step after the ci that depends on it, keeping the maximum when paths converge.
 */
private void processObsoleteOrder(long startingCiId, Map<Long, Integer> execOrder, Map<Long, List<CmsLink>> obsoleteRelations) {
    List<CmsLink> outgoing = obsoleteRelations.get(startingCiId);
    if (outgoing == null) {
        return;
    }
    int nextExecOrder = execOrder.get(startingCiId) + 1;
    for (CmsLink link : outgoing) {
        long nextCiId = link.getToCiId();
        Integer currentOrder = execOrder.get(nextCiId);
        // keep the largest order seen so a ci is deleted only after all its dependents
        if (currentOrder == null || nextExecOrder > currentOrder) {
            execOrder.put(nextCiId, nextExecOrder);
        }
        processObsoleteOrder(nextCiId, execOrder, obsoleteRelations);
    }
}
/**
 * Core rfc generation pass: assigns exec orders across the bom graph, creates/updates the
 * bom ci rfcs, wires bom.DependsOn relation rfcs, issues dummy updates and propagations,
 * and removes DependsOn relations that no longer exist (full deployments only).
 *
 * @param boms         the expanded bom graph for this platform/cloud
 * @param bindingRel   platform-to-cloud binding relation
 * @param isPartial    when true, stale DependsOn relations are NOT deleted
 * @param existingRels snapshot of committed relations and open relation rfcs
 * @param existingCIs  deployed bom cis keyed by "className:ciName"
 * @param existingRFCs open bom ci rfcs keyed the same way
 * @param releaseId    release to attach rfcs to (null lets downstream resolve it)
 * @return the max exec order consumed by this pass
 */
private int createBomRfcsAndRels(List<BomRfc> boms,
        String nsPath,
        CmsCIRelation bindingRel,
        int startExecOrder,
        boolean isPartial,
        String userId,
        ExistingRels existingRels,
        Map<String, CmsCI> existingCIs,
        Map<String, CmsRfcCI> existingRFCs,
        Long releaseId) {
    long nsId = trUtil.verifyAndCreateNS(nsPath);
    // index nodes by their synthetic node id for link resolution
    Map<String, BomRfc> bomMap = new HashMap<String, BomRfc>();
    for (BomRfc bom : boms) {
        bomMap.put(bom.nodeId, bom);
    }
    // need to verify all the to links for the case when we have converge link
    verifyToLinks(bomMap);
    // lets find out the exec order and populate relations list;
    // roots (no incoming fromLinks) seed the exec-order traversal
    Map<String, BomLink> links = new HashMap<String, BomLink>();
    for (BomRfc bom :boms) {
        if (bom.fromLinks.size()==0) {
            processOrder(bom, bomMap, startExecOrder, 1);
        } else {
            for (BomLink link : bom.fromLinks) {
                links.put(link.fromNodeId + "@" + link.toNodeId, link);
                //logger.info(link.fromNodeId + "-" + link.toNodeId);
            }
        }
    }
    int maxExecOrder = getMaxExecOrder(boms);
    // bucket nodes by exec order so rfcs are created in dependency order
    Map<Integer, List<BomRfc>> orderedMap = new HashMap<Integer, List<BomRfc>>();
    for (BomRfc bom : boms) {
        if (!orderedMap.containsKey(bom.execOrder)) {
            orderedMap.put(bom.execOrder, new ArrayList<BomRfc>());
        }
        orderedMap.get(bom.execOrder).add(bom);
    }
    Set<Long> propagations = new HashSet<Long>();
    Set<Long> bomCiIds = new HashSet<Long>();
    Map<Long, List<String>> manifestPropagations = new HashMap<Long, List<String>>();
    long timeTakenByPropagation = 0;
    //now lets create rfcs
    int realExecOrder = startExecOrder;
    int numberOfRFCs = 0;
    List<CmsRfcCI> replacedComputes = new ArrayList<CmsRfcCI>();
    for (int i=startExecOrder; i<=maxExecOrder; i++) {
        boolean incOrder = false;
        if (orderedMap.containsKey(i)) {
            for (BomRfc bom : orderedMap.get(i)) {
                String shortClazzName = trUtil.getShortClazzName(bom.mfstCi.getCiClassName());
                String bomId = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()) + ":" + bom.ciName;
                CmsCI existingCi = existingCIs.get(bomId);
                CmsRfcCI existingRfc = existingRFCs.get(bomId);
                boolean rfcCreated = false;
                if (priorityMap.containsKey(shortClazzName)) {
                    // priority classes are forced into the reserved low exec orders
                    bom.execOrder = priorityMap.get(shortClazzName);
                    rfcCreated = upsertRfcs(bom, existingCi, existingRfc, nsId, nsPath, bindingRel, releaseId, userId, existingRels);
                    if (rfcCreated && realExecOrder == 1) incOrder = true;
                } else {
                    //bom.execOrder = realExecOrder;
                    rfcCreated = upsertRfcs(bom, existingCi, existingRfc, nsId, nsPath, bindingRel, releaseId, userId, existingRels);
                    if (rfcCreated && bom.rfc != null) {
                        //if rfc was created, lets check if any propagation is required
                        if(bom.rfc.getCiClassName().equals("bom.Compute")
                                && bom.rfc.getRfcAction().equals("replace")) {
                            replacedComputes.add(bom.rfc);
                        }
                        long startTime = System.currentTimeMillis();
                        // lazily compute propagation targets per manifest ci
                        if (manifestPropagations.get(bom.manifestCiId) == null) {
                            mapPropagations(bom.manifestCiId, manifestPropagations);
                        }
                        if (manifestPropagations.get(bom.manifestCiId).size() != 0) {
                            propagateUpdate(bom.rfc.getCiId(), bom.manifestCiId, manifestPropagations, userId, propagations);
                        }
                        long endTime = System.currentTimeMillis();
                        timeTakenByPropagation = timeTakenByPropagation + (endTime - startTime);
                    }
                    incOrder = incOrder || rfcCreated;
                }
                if (bom.rfc != null) {
                    bomCiIds.add(bom.rfc.getCiId());
                }
                if (rfcCreated) {
                    numberOfRFCs++;
                    if (numberOfRFCs % 10 == 0) {
                        logger.info(">>> Inserted " + numberOfRFCs + " rfcs;");
                    }
                }
            }
        }
        // only consume a real exec-order slot when this level actually produced rfcs
        if (incOrder) realExecOrder++;
    }
    logger.info(">>> Inserted " + numberOfRFCs + " rfcs;");
    logger.info(">>> Done with RFCs working on relations...");
    //lets create dependsOn Relations
    //TODO question should we propagate rel attrs
    int maxRfcExecOrder = getMaxRfcExecOrder(boms);
    maxExecOrder = (maxRfcExecOrder > 0) ? maxRfcExecOrder : maxExecOrder;
    //execute all dummmy updates in one last step
    //maxExecOrder++;
    //List<CmsRfcRelation> existingDependsOnRels = cmRfcMrgProcessor.getDfDjRelations("bom.DependsOn", null, nsPath, null, null, null);
    Set<String> djRelGoids = new HashSet<String>();
    boolean increaseMaxOrder = false;
    int numberOfRelRFCs = 0;
    for (BomLink link : links.values()) {
        // only create DependsOn when both endpoints materialized into rfcs/cis
        if (bomMap.get(link.fromNodeId).rfc != null &&
            bomMap.get(link.toNodeId).rfc != null) {
            long fromCiId = bomMap.get(link.fromNodeId).rfc.getCiId();
            long toCiId = bomMap.get(link.toNodeId).rfc.getCiId();
            CmsRfcRelation dependsOn = bootstrapRelationRfc(fromCiId,toCiId,"bom.DependsOn", nsPath, existingRels);
            dependsOn.setComments(generateRelComments(bomMap.get(link.fromNodeId).rfc.getCiName(),
                    bomMap.get(link.fromNodeId).rfc.getCiClassName(),
                    bomMap.get(link.toNodeId).rfc.getCiName(),
                    bomMap.get(link.toNodeId).rfc.getCiClassName()));
            dependsOn.setCreatedBy(userId);
            dependsOn.setUpdatedBy(userId);
            dependsOn.setNsId(nsId);
            if (bomMap.get(link.fromNodeId).rfc.getRfcId() > 0) {
                dependsOn.setFromRfcId(bomMap.get(link.fromNodeId).rfc.getRfcId());
            }
            if (bomMap.get(link.toNodeId).rfc.getRfcId() >0) {
                dependsOn.setToRfcId(bomMap.get(link.toNodeId).rfc.getRfcId());
            }
            //since the DependsOn validation happened on Manifest level already we will skip validation here for perf reasons
            //dependsOn.setValidated(true);
            //CmsRfcRelation newRel = cmRfcMrgProcessor.upsertRfcRelationNoCheck(dependsOn, userId, "dj");
            createBomRelationRfc(dependsOn, existingRels, releaseId);
            djRelGoids.add(dependsOn.getRelationGoid());
            //if we got new relation lets create dummy update rfcs
            if (dependsOn.getRfcId()>0) {
                numberOfRelRFCs++;
                existingRels.addRelRfc(dependsOn);
                if (bomMap.get(link.fromNodeId).rfc.getRfcId()==0) {
                    // the from-side ci itself had no rfc: touch it so the new relation deploys
                    cmRfcMrgProcessor.createDummyUpdateRfc(fromCiId, null, bomMap.get(link.fromNodeId).execOrder, userId);
                    long startTime = System.currentTimeMillis();
                    if (manifestPropagations.get(bomMap.get(link.fromNodeId).manifestCiId) == null) {
                        mapPropagations(bomMap.get(link.fromNodeId).manifestCiId, manifestPropagations);
                    }
                    if (manifestPropagations.get(bomMap.get(link.fromNodeId).manifestCiId).size() != 0) {
                        propagateUpdate(fromCiId, bomMap.get(link.fromNodeId).manifestCiId, manifestPropagations, userId, propagations);
                    }
                    long endTime = System.currentTimeMillis();
                    timeTakenByPropagation = timeTakenByPropagation + (endTime - startTime);
                    increaseMaxOrder = true;
                }
                if (numberOfRelRFCs % 10 == 0) {
                    logger.info(">>> Inserted " + numberOfRelRFCs + " relation rfcs;");
                }
            }
        }
    }
    logger.info(">>> Inserted " + numberOfRelRFCs + " relation rfcs;");
    //Now create dummy updates for all the dependency-propagations needed
    if (propagations.size() > 0) {
        for (BomRfc bom : boms) {
            if (bom.rfc == null) {
                logger.info("rfc null for: " + bom.ciName);
                continue;
            }
            if (propagations.contains(bom.rfc.getCiId())) {
                String bomId = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()) + ":" + bom.ciName;
                CmsCI existingCi = existingCIs.get(bomId);
                CmsRfcCI existingRfc = existingRFCs.get(bomId);
                CmsRfcCI rfc = bootstrapRfc(bom,existingRfc, existingCi, nsPath);
                rfc.setCreatedBy(userId);
                rfc.setUpdatedBy(userId);
                rfc.setNsId(nsId);
                cmRfcMrgProcessor.createDummyUpdateRfc(rfc.getCiId(), null, bom.execOrder, userId);
            }
        }
    }
    //hack for lb/fqdn update on replaced computes
    propagate4ComputeReplace(replacedComputes);
    // for full deployments, delete DependsOn relations that no longer exist in the new graph
    if (!isPartial) {
        for (CmsCIRelation existingRel : existingRels.getExistingRel(BOM_DEPENDS_ON_RELATION_NAME)) {
            if (!djRelGoids.contains(existingRel.getRelationGoid())
                    && bomCiIds.contains(existingRel.getFromCiId())
                    && bomCiIds.contains(existingRel.getToCiId())) {
                cmRfcMrgProcessor.requestRelationDelete(existingRel.getCiRelationId(), userId);
            }
        }
    }
    if (increaseMaxOrder) maxExecOrder++;
    logger.info(nsPath + " >>> Total time taken by propagation in seconds: " + timeTakenByPropagation/1000);
    return maxExecOrder;
}
/**
 * Workaround for load balancers and fqdns: when a compute is replaced, touch every
 * bom.Lb and bom.Fqdn that depends on it so they redeploy right after the compute.
 */
private void propagate4ComputeReplace(List<CmsRfcCI> bomCompRfcs) {
    String[] dependentClasses = {"bom.Lb", "bom.Fqdn"};
    for (CmsRfcCI computeRfc : bomCompRfcs) {
        for (String dependentClazz : dependentClasses) {
            for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(computeRfc.getCiId(), "bom.DependsOn", null, dependentClazz)) {
                cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, computeRfc.getExecOrder() + 1, computeRfc.getCreatedBy());
            }
        }
    }
}
/**
 * Loads the bom cis already deployed to the given cloud within the namespace,
 * keyed by "className:ciName" for quick diffing against the new graph.
 */
private Map<String, CmsCI> getExistingCis(long cloudId, String nsPath) {
    Map<String, CmsCI> existingByKey = new HashMap<String, CmsCI>();
    for (CmsCIRelation deployedTo : cmProcessor.getToCIRelationsByNs(cloudId, BOM_CLOUD_RELATION_NAME, null, null, nsPath)) {
        CmsCI bomCi = deployedTo.getFromCi();
        existingByKey.put(bomCi.getCiClassName() + ":" + bomCi.getCiName(), bomCi);
    }
    return existingByKey;
}
/**
 * Loads all committed relations in the namespace, grouped by relation name and then
 * keyed by the endpoint pair "fromCiId:toCiId".
 */
private Map<String, Map<String,CmsCIRelation>> getExistingRelations(String nsPath) {
    Map<String, Map<String,CmsCIRelation>> relsByName = new HashMap<String, Map<String,CmsCIRelation>>();
    for (CmsCIRelation rel : cmProcessor.getCIRelationsNaked(nsPath, null, null, null, null)) {
        Map<String,CmsCIRelation> byEndpoints = relsByName.get(rel.getRelationName());
        if (byEndpoints == null) {
            byEndpoints = new HashMap<String,CmsCIRelation>();
            relsByName.put(rel.getRelationName(), byEndpoints);
        }
        byEndpoints.put(rel.getFromCiId() + ":" + rel.getToCiId(), rel);
    }
    return relsByName;
}
/**
 * Loads all open (pending) ci rfcs in the namespace, keyed by "className:ciName".
 */
private Map<String, CmsRfcCI> getOpenRFCs(String nsPath) {
    Map<String, CmsRfcCI> openByKey = new HashMap<String, CmsRfcCI>();
    for (CmsRfcCI openRfc : rfcProcessor.getOpenRfcCIByClazzAndName(nsPath, null, null)) {
        openByKey.put(openRfc.getCiClassName() + ":" + openRfc.getCiName(), openRfc);
    }
    return openByKey;
}
/**
 * Loads all open (pending) relation rfcs in the namespace, grouped by relation name and
 * then keyed by the endpoint pair "fromCiId:toCiId".
 */
private Map<String, Map<String,CmsRfcRelation>> getOpenRelationsRfcs(String nsPath) {
    Map<String, Map<String,CmsRfcRelation>> rfcsByName = new HashMap<String, Map<String,CmsRfcRelation>>();
    for (CmsRfcRelation relRfc : rfcProcessor.getOpenRfcRelationsByNs(nsPath)) {
        Map<String,CmsRfcRelation> byEndpoints = rfcsByName.get(relRfc.getRelationName());
        if (byEndpoints == null) {
            byEndpoints = new HashMap<String,CmsRfcRelation>();
            rfcsByName.put(relRfc.getRelationName(), byEndpoints);
        }
        byEndpoints.put(relRfc.getFromCiId() + ":" + relRfc.getToCiId(), relRfc);
    }
    return rfcsByName;
}
/**
 * Recursively marks dependent bom cis for dummy updates based on the propagate_to
 * directives of their manifest DependsOn relations. A matching bom neighbor is one whose
 * ci name starts with "&lt;targetManifestName&gt;-". Visited cis are tracked in
 * {@code propagations} to terminate cycles.
 *
 * Fix vs. previous version: {@code realizedAs.get(0)} was guarded only by a null check;
 * an empty result list would throw IndexOutOfBoundsException. Both call sites now also
 * check {@code !realizedAs.isEmpty()}.
 *
 * @param bomCiId              the bom ci whose update triggers propagation
 * @param manifestId           manifest ci realized by bomCiId
 * @param manifestPropagations manifest ciId -> target manifest ci names (from mapPropagations)
 * @param propagations         accumulator of bom ci ids already scheduled for propagation
 */
private void propagateUpdate(long bomCiId, long manifestId,
        Map<Long, List<String>> manifestPropagations, String userId, Set<Long> propagations) {
    List<String> targetManifestCiNames = manifestPropagations.get(manifestId);
    List<CmsCIRelation> rels = cmProcessor.getAllCIRelations(bomCiId);// all bom relations for this bom ci
    if (targetManifestCiNames == null) {
        logger.info("nothing to propagate for bomCiId: " + bomCiId + " and manifestCiId: " + manifestId);
        return;
    }
    for (String targetCiName : targetManifestCiNames) {
        for (CmsCIRelation rel : rels) {
            if (! rel.getRelationName().equals("bom.DependsOn")) {
                continue;
            }
            if (rel.getFromCi() != null) {
                String ciName = rel.getFromCi().getCiName();
                if (ciName != null && ciName.startsWith(targetCiName + "-")) {
                    if (propagations.contains(rel.getFromCiId())) {
                        continue;
                    }
                    logger.info("propagating update from bom cid : " + bomCiId + " to " + rel.getFromCiId());
                    propagations.add(rel.getFromCiId());
                    List<CmsCIRelation> realizedAs = cmProcessor.getToCIRelations(rel.getFromCiId(),
                            "base.RealizedAs", rel.getFromCi().getCiClassName().replaceFirst("bom", "manifest"));
                    // guard against both null and empty result before dereferencing get(0)
                    if (realizedAs != null && !realizedAs.isEmpty()) {
                        propagateUpdate(rel.getFromCiId(), realizedAs.get(0).getFromCiId(), manifestPropagations, userId, propagations);
                    }
                }
            } else if (rel.getToCi() != null) {
                String ciName = rel.getToCi().getCiName();
                if (ciName != null && ciName.startsWith(targetCiName + "-")) {
                    if (propagations.contains(rel.getToCiId())) {
                        continue;
                    }
                    logger.info("propagating update from bom cid : " + bomCiId + " to " + rel.getToCiId());
                    propagations.add(rel.getToCiId());
                    List<CmsCIRelation> realizedAs = cmProcessor.getToCIRelations(rel.getToCiId(),
                            "base.RealizedAs", rel.getToCi().getCiClassName().replaceFirst("bom", "manifest"));
                    // guard against both null and empty result before dereferencing get(0)
                    if (realizedAs != null && !realizedAs.isEmpty()) {
                        propagateUpdate(rel.getToCiId(), realizedAs.get(0).getFromCiId(), manifestPropagations, userId, propagations);
                    }
                }
            }
        }
    }
}
/**
 * Resolves (and memoizes) the propagation targets for a manifest ci: the names of
 * neighboring manifest cis whose DependsOn relation carries a matching propagate_to
 * directive ("to"/"both" on outgoing edges, "from"/"both" on incoming). Recurses so
 * transitive targets are cached as well.
 */
private void mapPropagations(long manifestCiId, Map<Long, List<String>> manifestPropagations) {
    if (manifestPropagations.containsKey(manifestCiId)) {
        return; // propagation targets already resolved for this manifest ci
    }
    List<String> targets = new ArrayList<String>();
    // register before recursing so cycles terminate
    manifestPropagations.put(manifestCiId, targets);
    for (CmsCIRelation rel : cmProcessor.getAllCIRelations(manifestCiId)) {
        if (!"manifest.DependsOn".equals(rel.getRelationName())) {
            continue;
        }
        CmsCIRelationAttribute propagateTo = rel.getAttribute("propagate_to");
        if (propagateTo == null || propagateTo.getDfValue() == null) {
            continue;
        }
        String direction = propagateTo.getDfValue();
        if (rel.getFromCiId() > 0
                && rel.getFromCiId() == manifestCiId
                && ("to".equalsIgnoreCase(direction) || "both".equalsIgnoreCase(direction))) {
            // outgoing edge propagates to the dependency
            targets.add(rel.getToCi().getCiName());
            mapPropagations(rel.getToCiId(), manifestPropagations);
        } else if (rel.getToCiId() > 0
                && rel.getToCiId() == manifestCiId
                && ("from".equalsIgnoreCase(direction) || "both".equalsIgnoreCase(direction))) {
            // incoming edge propagates back to the dependent
            targets.add(rel.getFromCi().getCiName());
            mapPropagations(rel.getFromCiId(), manifestPropagations);
        }
    }
}
/**
 * Ensures every fromLink has a mirrored toLink on its target node; converge links can
 * leave the reverse side missing, which would break exec-order traversal.
 */
private void verifyToLinks(Map<String, BomRfc> bomMap) {
    for (BomRfc fromBom : bomMap.values()) {
        for (BomLink fromLink : fromBom.fromLinks) {
            BomRfc toBom = bomMap.get(fromLink.toNodeId);
            if (toBom.getExisitngToLinks(fromBom.nodeId) == null) {
                // synthesize the missing mirror edge
                BomLink mirror = new BomLink();
                mirror.fromNodeId = fromBom.nodeId;
                mirror.fromMfstCiId = fromBom.manifestCiId;
                mirror.toNodeId = toBom.nodeId;
                mirror.toMfstCiId = toBom.manifestCiId;
                toBom.toLinks.add(mirror);
            }
        }
    }
}
    /**
     * Persists the rfcs for a single bom node: either delete rfcs (when the manifest
     * CI is pending deletion) or an add/update ci rfc plus its supporting relations -
     * "base.RealizedAs" back to the manifest CI and "base.DeployedTo" to the cloud
     * binding.
     *
     * @return true when a ci rfc was actually created (or a dummy update was forced)
     *         for this bom node
     */
    private boolean upsertRfcs(BomRfc bom, CmsCI existingCi, CmsRfcCI existingRfc, long nsId, String nsPath, CmsCIRelation bindingRel, Long releaseId, String userId, ExistingRels existingRels) {
        boolean rfcCreated = false;
        if (bom.mfstCi.getCiState().equalsIgnoreCase("pending_deletion")) {
            // manifest CI is being deleted - generate delete rfcs for every matching bom ci in this ns
            List<CmsRfcCI> cis2delete = cmRfcMrgProcessor.getDfDjCi(nsPath, "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName()), bom.ciName, "dj");
            if (cis2delete.size() > 0) {
                for (CmsRfcCI ci2delete : cis2delete) {
                    //bom.rfc = cmRfcMrgProcessor.requestCiDelete(ci2delete.getCiId(), userId, bom.execOrder);
                    bom.rfc = cmRfcMrgProcessor.requestCiDeleteCascadeNoRelsRfcs(ci2delete.getCiId(), userId, bom.execOrder);
                    rfcCreated = bom.rfc.getRfcId() > 0;
                }
            } else {
                //if no boms lets see if we have some in other cloud
                // no RealizedAs edge remains anywhere -> the manifest ci itself can go
                if (cmProcessor.getCountFromCIRelationsByNS(bom.mfstCi.getCiId(), "base.RealizedAs", null, null, nsPath, false) == 0) {
                    cmProcessor.deleteCI(bom.mfstCi.getCiId(), true, userId);
                }
            }
        } else {
            CmsRfcCI rfc = bootstrapRfc(bom, existingRfc, existingCi, nsPath);
            rfc.setCreatedBy(userId);
            rfc.setUpdatedBy(userId);
            rfc.setNsId(nsId);
            //bom.rfc = cmRfcMrgProcessor.upsertRfcCINoChecks(rfc, userId, "dj");
            createBomRfc(rfc,existingCi, existingRfc, releaseId);
            bom.rfc = rfc;
            rfcCreated = bom.rfc.getRfcId() > 0;
            if (bom.rfc.getRfcId() == 0) {
                //lets make sure the manifest object has not changed or we will create dummy update
                CmsCIRelation realizedAsRel = existingRels.getExistingRel(BOM_REALIZED_RELATION_NAME, bom.mfstCi.getCiId(), bom.rfc.getCiId());
                //cmProcessor.getFromToCIRelations(bom.mfstCi.getCiId(), "base.RealizedAs", bom.rfc.getCiId());
                if (realizedAsRel != null && realizedAsRel.getAttribute("last_manifest_rfc") != null) {
                    long deployedManifestRfc = Long.valueOf(realizedAsRel.getAttribute("last_manifest_rfc").getDjValue());
                    // manifest changed since last deploy -> force a touch rfc so the component redeploys
                    if (bom.mfstCi.getLastAppliedRfcId() > deployedManifestRfc) {
                        //TODO convert to direct insert
                        bom.rfc = cmRfcMrgProcessor.createDummyUpdateRfc(bom.rfc.getCiId(), null, bom.execOrder, userId);
                        rfcCreated = true;
                    }
                }
            }
            //lets create RealizedAs relation
            Map<String,String> attrs = new HashMap<String,String>();
            attrs.put("last_manifest_rfc", String.valueOf(bom.mfstCi.getLastAppliedRfcId()));
            CmsRfcRelation realizedAs = bootstrapRelationRfcWithAttributes(bom.mfstCi.getCiId(), bom.rfc.getCiId(), "base.RealizedAs", nsPath, attrs, existingRels);
            if (rfcCreated) {
                realizedAs.setToRfcId(bom.rfc.getRfcId());
            }
            realizedAs.setComments(generateRelComments(bom.mfstCi.getCiName(), bom.mfstCi.getCiClassName(), bom.rfc.getCiName(), bom.rfc.getCiClassName()));
            realizedAs.getAttribute("priority").setNewValue(bindingRel.getAttribute("priority").getDjValue());
            realizedAs.setCreatedBy(userId);
            realizedAs.setUpdatedBy(userId);
            realizedAs.setNsId(nsId);
            //validateRelRfc(realizedAs, bom.mfstCi.getCiClassId(), bom.rfc.getCiClassId());
            //realizedAs.setValidated(true);
            createBomRelationRfc(realizedAs, existingRels, releaseId);
            //cmRfcMrgProcessor.upsertRfcRelationNoCheck(realizedAs, userId, "dj");
            //lest create relation to the binding
            CmsRfcRelation deployedTo = bootstrapRelationRfc(bom.rfc.getCiId(), bindingRel.getToCiId(), "base.DeployedTo", nsPath, existingRels);
            deployedTo.setComments(generateRelComments(bom.rfc.getCiName(), bom.rfc.getCiClassName(), bindingRel.getToCi().getCiName(), bindingRel.getToCi().getCiClassName()));
            deployedTo.getAttribute("priority").setNewValue(bindingRel.getAttribute("priority").getDjValue());
            deployedTo.setCreatedBy(userId);
            deployedTo.setUpdatedBy(userId);
            deployedTo.setNsId(nsId);
            //validateRelRfc(deployedTo, bom.rfc.getCiClassId(), bindingRel.getToCi().getCiClassId());
            //deployedTo.setValidated(true);
            if (rfcCreated) {
                deployedTo.setFromRfcId(bom.rfc.getRfcId());
            }
            createBomRelationRfc(deployedTo, existingRels, releaseId);
            //cmRfcMrgProcessor.upsertRfcRelationNoCheck(deployedTo, userId, "dj");
        }
        return rfcCreated;
    }
    /**
     * Writes the ci rfc for a bom node. A brand-new ci (ciId == 0) always gets an
     * "add" rfc; an existing ci gets an update/replace rfc only when
     * needUpdateRfc() finds an attribute delta (or the ci is in "replace" state).
     *
     * NOTE(review): when the releaseId argument is non-null it is never applied to
     * the rfc here - it only suppresses the open-release lookup; confirm that is
     * the intended contract with callers.
     */
    private void createBomRfc(CmsRfcCI rfc, CmsCI existingCi, CmsRfcCI existingRfc, Long releaseId) {
        if (rfc.getCiId() == 0) {
            //this is add rfc
            if (releaseId == null) {
                if (rfc.getReleaseId() > 0) {
                    releaseId = rfc.getReleaseId();
                } else {
                    // no release known yet - attach the rfc to the open release for this ns
                    rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                }
            }
            rfc.setIsActiveInRelease(true);
            rfc.setRfcAction("add");
            if (rfc.getRfcId() == 0) {
                rfcProcessor.createBomRfc(rfc);
            } else {
                // an open rfc already exists for this ci - update it in place
                rfcProcessor.updateBomRfc(rfc, existingRfc);
            }
        } else {
            //need to figure out delta and create update rfc
            if (needUpdateRfc(rfc, existingCi)) {
                if (releaseId == null) {
                    if (rfc.getReleaseId() > 0) {
                        releaseId = rfc.getReleaseId();
                    } else {
                        rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                    }
                }
                rfc.setIsActiveInRelease(true);
                if (rfc.getRfcId() == 0) {
                    rfcProcessor.createBomRfc(rfc);
                } else {
                    rfcProcessor.updateBomRfc(rfc, existingRfc);
                }
                /*
                if(rfc.getCiClassName().equals("bom.Compute")
                        && rfc.getRfcAction().equals("replace")) {
                    for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(rfc.getCiId(), "bom.DependsOn", null, "bom.Lb")) {
                        cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, rfc.getExecOrder() + 1, rfc.getCreatedBy());
                    }
                    for (CmsCIRelation rel : cmProcessor.getToCIRelationsNakedNoAttrs(rfc.getCiId(), "bom.DependsOn", null, "bom.Fqdn")) {
                        cmRfcMrgProcessor.createDummyUpdateRfc(rel.getFromCiId(), null, rfc.getExecOrder() + 1, rfc.getCreatedBy());
                    }
                }
                */
            }
        }
    }
private boolean needUpdateRfc(CmsRfcCI rfcCi, CmsCI baseCi) {
boolean needUpdate = false;
if ("replace".equals(baseCi.getCiState())) {
rfcCi.setRfcAction("replace");
needUpdate = true;
} else {
rfcCi.setRfcAction("update");
}
Set<String> equalAttrs = new HashSet<String>( rfcCi.getAttributes().size());
for (CmsRfcAttribute attr : rfcCi.getAttributes().values()){
CmsCIAttribute existingAttr = baseCi.getAttribute(attr.getAttributeName());
if (djValidator.equalStrs(attr.getNewValue(), existingAttr.getDjValue())) {
equalAttrs.add(attr.getAttributeName());
} else {
needUpdate = true;
}
}
if (needUpdate) {
for (String equalAttrName : equalAttrs) {
rfcCi.getAttributes().remove(equalAttrName);
}
}
return needUpdate;
}
    /**
     * Writes the relation rfc for a bom relation. A brand-new relation
     * (ciRelationId == 0) always gets an "add" rfc; an existing relation gets an
     * "update" rfc only when needUpdateRfcRelation() finds an attribute delta.
     *
     * NOTE(review): as with createBomRfc, a non-null releaseId argument is only
     * used to suppress the open-release lookup and is never applied to the rfc -
     * confirm intended.
     */
    private void createBomRelationRfc(CmsRfcRelation rfc, ExistingRels existingRels, Long releaseId) {
        if (rfc.getCiRelationId() == 0) {
            //this is add rfc
            if (releaseId == null) {
                if (rfc.getReleaseId() > 0) {
                    releaseId = rfc.getReleaseId();
                } else {
                    // attach to the open release for this ns
                    rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                }
            }
            rfc.setIsActiveInRelease(true);
            rfc.setRfcAction("add");
            if (rfc.getRfcId() == 0) {
                rfcProcessor.createBomRfcRelation(rfc);
            } else {
                // open rfc already exists for this relation - update it in place
                rfcProcessor.updateBomRfcRelation(rfc, existingRels.getOpenRelRfc(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId()));
            }
        } else {
            //need to figure out delta and create update rfc
            CmsCIRelation existingRel = existingRels.getExistingRel(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId());
            if (needUpdateRfcRelation(rfc, existingRel)) {
                if (releaseId == null) {
                    if (rfc.getReleaseId() > 0) {
                        releaseId = rfc.getReleaseId();
                    } else {
                        rfc.setReleaseId(rfcProcessor.getOpenReleaseIdByNs(rfc.getReleaseNsPath(), null, rfc.getCreatedBy()));
                    }
                }
                rfc.setIsActiveInRelease(true);
                rfc.setRfcAction("update");
                if (rfc.getRfcId() == 0) {
                    rfcProcessor.createBomRfcRelation(rfc);
                } else {
                    rfcProcessor.updateBomRfcRelation(rfc, existingRels.getOpenRelRfc(rfc.getRelationName(), rfc.getFromCiId(), rfc.getToCiId()));
                }
            }
        }
    }
private boolean needUpdateRfcRelation(CmsRfcRelation rfcRel, CmsCIRelation baseRel) {
boolean needUpdate = false;
Set<String> equalAttrs = new HashSet<String>( rfcRel.getAttributes().size());
for (CmsRfcAttribute attr : rfcRel.getAttributes().values()){
CmsCIRelationAttribute existingAttr = baseRel.getAttribute(attr.getAttributeName());
if (djValidator.equalStrs(attr.getNewValue(), existingAttr.getDjValue())) {
equalAttrs.add(attr.getAttributeName());
} else {
needUpdate = true;
}
}
if (needUpdate) {
for (String attrName : equalAttrs) {
rfcRel.getAttributes().remove(attrName);
}
}
return needUpdate;
}
private void validateRelRfc(CmsRfcRelation rfcRelation, int fromClassId, int toClassId) {
CIValidationResult validation = djValidator.validateRfcRelation(rfcRelation, fromClassId, toClassId);
if (!validation.isValidated()) {
logger.error(validation.getErrorMsg());
throw new DJException(CmsError.DJ_VALIDATION_ERROR, validation.getErrorMsg());
}
rfcRelation.setValidated(true);
}
private String generateRelComments(String fromCiName, String fromCiClass, String toCiName, String toCiClass) {
Map<String, String> strMap = new HashMap<String, String>();
strMap.put("fromCiName", fromCiName);
strMap.put("fromCiClass", fromCiClass);
strMap.put("toCiName", toCiName);
strMap.put("toCiClass", toCiClass);
return gson.toJson(strMap);
}
    /**
     * Tears down a manifest platform for one cloud binding: marks existing bom cis
     * obsolete (delete rfcs) when boms exist, otherwise cleans up any dangling open
     * rfcs; when the platform is pending deletion and no boms remain, hard-deletes
     * the manifest components, the platform ci and its namespace.
     *
     * @return the max exec order of the generated delete rfcs (0 when nothing to do)
     */
    public int deleteManifestPlatform(CmsCI platformCi, CmsCIRelation bindingRel, String nsPath, int startExecOrder, String userId){
        int maxExecOrder = 0;
        List<CmsCIRelation> mfstPlatComponents = cmProcessor.getFromCIRelations(platformCi.getCiId(), null, "Requires", null);
        if (mfstPlatComponents.size() > 0) {
            //List<BomRfc> boms = new ArrayList<BomRfc>();
            // Iaas platforms are not versioned in the ns; regular platforms are
            String platNsPath = null;
            if (platformCi.getCiClassName().equals("manifest.Iaas")) {
                platNsPath = nsPath + "/" + platformCi.getCiName();
            } else {
                platNsPath = nsPath + "/" + platformCi.getCiName() + "/" + platformCi.getAttribute("major_version").getDjValue();
            }
            long numOfBoms = cmProcessor.getCountBy3(platNsPath, null, null, false);
            if (numOfBoms >0) {
                // real bom cis exist - generate delete rfcs via the obsolete-bom pass
                logger.info(nsPath + ">>>" + platformCi.getCiName() + ", finding obsolete boms");
                Map<String, CmsCI> existingCIs = getExistingCis(bindingRel.getToCiId(), platNsPath);
                maxExecOrder = findObsolete(new ArrayList<BomRfc>(), bindingRel, platNsPath, startExecOrder, existingCIs, userId, true);
            } else {
                // there is no boms lets cleanup any open rfcs if any
                List<CmsRfcRelation> deployedTorfcRels = rfcProcessor.getOpenToRfcRelationByTargetClazzNoAttrs(bindingRel.getToCiId(), "base.DeployedTo", null, null);
                for (CmsRfcRelation deployedToRel : deployedTorfcRels) {
                    // remove all open relation rfcs touching the orphaned ci, then the ci rfc itself
                    List<CmsRfcRelation> rfcRels = rfcProcessor.getOpenRfcRelationBy2(deployedToRel.getFromCiId(), null, null, null);
                    rfcRels.addAll(rfcProcessor.getOpenRfcRelationBy2(null, deployedToRel.getFromCiId(), null, null));
                    for (CmsRfcRelation rfcRel : rfcRels) {
                        rfcProcessor.rmRfcRelationFromRelease(rfcRel.getRfcId());
                    }
                    rfcProcessor.rmRfcCiFromRelease(deployedToRel.getFromRfcId());
                }
            }
            if (platformCi.getCiState().equalsIgnoreCase("pending_deletion") && numOfBoms==0) {
                //if no bom exists - delete the manifest platform for real
                for (CmsCIRelation mfstPlatComponentRel : mfstPlatComponents) {
                    cmProcessor.deleteCI(mfstPlatComponentRel.getToCiId(), true, userId);
                }
                cmProcessor.deleteCI(platformCi.getCiId(), true, userId);
                trUtil.deleteNs(platNsPath);
            }
        }
        return maxExecOrder;
    }
private CmsRfcCI bootstrapRfc(BomRfc bom, CmsRfcCI existingRfc, CmsCI existingBomCi, String nsPath) {
CmsRfcCI newRfc = new CmsRfcCI();
newRfc.setNsPath(nsPath);
String targetClazzName = "bom." + trUtil.getLongShortClazzName(bom.mfstCi.getCiClassName());
CmsClazz targetClazz = mdProcessor.getClazz(targetClazzName);
newRfc.setCiClassId(targetClazz.getClassId());
newRfc.setCiClassName(targetClazz.getClassName());
//bootstrap the default values from Class definition and populate map for checks
Map<String, CmsClazzAttribute> clazzAttrs = new HashMap<String, CmsClazzAttribute>();
for (CmsClazzAttribute clAttr : targetClazz.getMdAttributes()) {
if (clAttr.getDefaultValue() != null) {
CmsRfcAttribute rfcAttr = new CmsRfcAttribute();
rfcAttr.setAttributeId(clAttr.getAttributeId());
rfcAttr.setAttributeName(clAttr.getAttributeName());
rfcAttr.setNewValue(clAttr.getDefaultValue());
newRfc.addAttribute(rfcAttr);
}
clazzAttrs.put(clAttr.getAttributeName(), clAttr);
}
//populate values from manifest obj if it's not null
applyCiToRfc(newRfc, bom, clazzAttrs, true);
newRfc.setExecOrder(bom.execOrder);
setCiId(newRfc, existingRfc, existingBomCi);
return newRfc;
}
/*
private void reverseExecOrder(List<BomRfc> boms, int startOrder) {
int maxOrder = getMaxExecOrder(boms);
for (BomRfc bom:boms) {
bom.execOrder = maxOrder-bom.execOrder+startOrder;
}
}
*/
private void processOrder(BomRfc bom, Map<String, BomRfc> bomMap, int order, int recursionDepth) {
if (recursionDepth >= MAX_RECUSION_DEPTH) {
String err = "Circular dependency detected, (level - " + recursionDepth + "),\n please check the platform diagram for " + extractPlatformNameFromNsPath(bom.mfstCi.getNsPath());
logger.error(err);
throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
}
bom.execOrder = (order > bom.execOrder) ? order : bom.execOrder;
order += 1;
for (BomLink link : bom.toLinks) {
BomRfc parentBom = bomMap.get(link.fromNodeId);
processOrder(parentBom, bomMap, order, recursionDepth + 1);
}
}
private int getMaxExecOrder(List<BomRfc> boms) {
int maxExecOrder = 0;
for (BomRfc bom : boms) {
maxExecOrder = (bom.execOrder > maxExecOrder) ? bom.execOrder : maxExecOrder;
}
return maxExecOrder;
}
private int getMaxRfcExecOrder(List<BomRfc> boms) {
int maxExecOrder = 0;
for (BomRfc bom : boms) {
if (bom.rfc != null && bom.rfc.getRfcId()>0) {
maxExecOrder = (bom.execOrder > maxExecOrder) ? bom.execOrder : maxExecOrder;
}
}
return maxExecOrder;
}
private void processEntryPointRel(long platformCiId, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels) {
List<CmsCIRelation> entryPoints = cmProcessor.getFromCIRelationsNaked(platformCiId, null, "Entrypoint", null);
for (CmsCIRelation epRel : entryPoints) {
if (bomsMap.containsKey(epRel.getToCiId())) {
for (BomRfc bom : bomsMap.get(epRel.getToCiId())) {
if (bom.rfc != null) {
CmsRfcRelation entryPoint = bootstrapRelationRfc(platformCiId, bom.rfc.getCiId(),"base.Entrypoint", nsPath, existingRels);
cmRfcMrgProcessor.upsertRelationRfc(entryPoint, user, "dj");
}
}
}
}
}
    /**
     * Propagates manifest ManagedVia relations to the bom level. For every
     * manifest ManagedVia edge, the bom-level targets are found by traversing the
     * bom DependsOn graph along the same class path, and a "bom.ManagedVia"
     * relation rfc is created for each resolved target (deduped by relation goid).
     *
     * @throws TransistorException when the DependsOn path cannot be traversed
     */
    private void processManagedViaRels(List<CmsCIRelation> mfstCiRels, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels, Long releaseId) {
        long nsId = trUtil.verifyAndCreateNS(nsPath);
        List<CmsLink> dependsOnlinks = cmRfcMrgProcessor.getLinks(nsPath, "bom.DependsOn");
        //convert to map for traversing the path
        // fromCiId -> (toClazzName -> [toCiIds]) index of the bom DependsOn graph
        Map<Long, Map<String,List<Long>>> dependsOnMap = new HashMap<Long, Map<String,List<Long>>>();
        for (CmsLink link : dependsOnlinks) {
            if (!dependsOnMap.containsKey(link.getFromCiId())) {
                dependsOnMap.put(link.getFromCiId(), new HashMap<String,List<Long>>());
            }
            if (!dependsOnMap.get(link.getFromCiId()).containsKey(link.getToClazzName())) {
                dependsOnMap.get(link.getFromCiId()).put(link.getToClazzName(), new ArrayList<Long>());
            }
            dependsOnMap.get(link.getFromCiId()).get(link.getToClazzName()).add(link.getToCiId());
        }
        // goids of relation rfcs already created in this pass (dedupe guard)
        Set<String> relRfcGoids = new HashSet<String>();
        for (CmsCIRelation mfstCiRel : mfstCiRels) {
            CmsCI mfstCi = mfstCiRel.getToCi();
            //first lets check if we even have an add rfc for this Ci
            //if (newRfcExists(mfstCi.getCiId(), bomsMap)) {
            List<CmsCIRelation> mfstMngViaRels = cmProcessor.getFromCIRelationsNaked(mfstCi.getCiId(), null, "ManagedVia", null);
            for (CmsCIRelation mfstMngViaRel : mfstMngViaRels) {
                // lets find the path
                //List<String> pathClasses = getTraversalPath(mfstMngViaRel);
                List<String> pathClasses = getDpOnPath(mfstMngViaRel.getFromCiId(), mfstMngViaRel.getToCiId());
                if (pathClasses.size()==0) {
                    String err = "Can not traverse ManagedVia relation using DependsOn path from ci " + mfstMngViaRel.getFromCiId() + ", to ci " + mfstMngViaRel.getToCiId() + "\n";
                    err += mfstMngViaRel.getComments();
                    logger.error(err);
                    throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
                }
                for (BomRfc bomRfc : bomsMap.get(mfstCi.getCiId())) {
                    //for this rfc we need to traverse by the DependsOn path down to ManagedVia Ci and create the relation\
                    //Now this is tricky since it could get resolved as a tree so we need to use recursion
                    LinkedList<String> path = new LinkedList<String>();
                    path.addAll(pathClasses);
                    if (bomRfc.rfc != null) {
                        List<Long> targets = getLeafsByPath(bomRfc.rfc.getCiId(), path,mfstMngViaRel.getToCiId(), dependsOnMap);
                        // index the target manifest's bom nodes by their bom ci id
                        // NOTE(review): assumes every target bom node has a non-null rfc - confirm
                        Map<Long, BomRfc> targetMap = new HashMap<Long, BomRfc>();
                        for (BomRfc targetBom : bomsMap.get(mfstMngViaRel.getToCiId())) {
                            targetMap.put(targetBom.rfc.getCiId(), targetBom);
                        }
                        for (long managedViaCiId : targets) {
                            CmsCIRelation existingRel = existingRels.getExistingRel(BOM_MANAGED_VIA_RELATION_NAME, bomRfc.rfc.getCiId(), managedViaCiId);
                            //cmProcessor.getFromToCIRelationsNaked(bomRfc.rfc.getCiId(), "bom.ManagedVia", managedViaCiId);
                            // only create the relation rfc when the relation does not already exist
                            if (existingRel == null) {
                                CmsRfcRelation managedVia = bootstrapRelationRfc(bomRfc.rfc.getCiId(), managedViaCiId, "bom.ManagedVia", nsPath, existingRels);
                                managedVia.setNsId(nsId);
                                managedVia.setReleaseId(bomRfc.rfc.getReleaseId());
                                if (!relRfcGoids.contains(managedVia.getRelationGoid())) {
                                    if (targetMap.containsKey(managedViaCiId)) {
                                        CmsRfcCI toCiRfc = targetMap.get(managedViaCiId).rfc;
                                        managedVia.setComments(generateRelComments(bomRfc.rfc.getCiName(), bomRfc.rfc.getCiClassName(), toCiRfc.getCiName(), toCiRfc.getCiClassName()));
                                        if (bomRfc.rfc != null && bomRfc.rfc.getRfcId() > 0) {
                                            managedVia.setFromRfcId(bomRfc.rfc.getRfcId());
                                        }
                                        if (toCiRfc.getRfcId() > 0) {
                                            managedVia.setToRfcId(toCiRfc.getRfcId());
                                        }
                                        //managedVia.setValidated(true);
                                        createBomRelationRfc(managedVia,existingRels,releaseId);
                                        relRfcGoids.add(managedVia.getRelationGoid());
                                        //cmRfcMrgProcessor.upsertRfcRelationNoCheck(managedVia, user, "dj");
                                    }
                                }
                            }
                        }
                    }
                    //}
                }
            }
        }
    };
private void processSecuredByRels(List<CmsCIRelation> mfstCiRels, Map<Long, List<BomRfc>> bomsMap, String nsPath, String user, ExistingRels existingRels, Long releaseId) {
long nsId = trUtil.verifyAndCreateNS(nsPath);
for (CmsCIRelation mfstCiRel : mfstCiRels) {
CmsCI mfstCi = mfstCiRel.getToCi();
List<CmsCIRelation> mfstSecuredByRels = cmProcessor.getFromCIRelationsNaked(mfstCi.getCiId(), null, "SecuredBy", null);
for (CmsCIRelation mfstSecuredByRel : mfstSecuredByRels) {
for (BomRfc fromBomRfc : bomsMap.get(mfstCi.getCiId())) {
for (BomRfc toBomRfc : bomsMap.get(mfstSecuredByRel.getToCiId())) {
CmsRfcRelation securedBy = bootstrapRelationRfc(fromBomRfc.rfc.getCiId(), toBomRfc.rfc.getCiId(), "bom.SecuredBy", nsPath, existingRels);
securedBy.setComments(generateRelComments(fromBomRfc.rfc.getCiName(), fromBomRfc.rfc.getCiClassName(), toBomRfc.rfc.getCiName(), toBomRfc.rfc.getCiClassName()));
securedBy.setCreatedBy(user);
securedBy.setUpdatedBy(user);
securedBy.setNsId(nsId);
validateRelRfc(securedBy, fromBomRfc.rfc.getCiClassId(), toBomRfc.rfc.getCiClassId());
if (fromBomRfc.rfc.getRfcId() > 0) {
securedBy.setFromRfcId(fromBomRfc.rfc.getRfcId());
}
if (toBomRfc.rfc.getRfcId() > 0 ) {
securedBy.setToRfcId(toBomRfc.rfc.getRfcId());
}
createBomRelationRfc(securedBy, existingRels, releaseId);
//cmRfcMrgProcessor.upsertRelationRfc(securedBy, user, "dj");
}
}
}
}
};
private List<Long> getLeafsByPath(long startCiId, LinkedList<String> path, long targetMfstCiId, Map<Long, Map<String,List<Long>>> dependsOnMap) {
List<Long> listOfTargets = new ArrayList<Long>();
if (path.size() == 0) {
//we reached end of the path but seems like there are multiple routes, but at this point we are good
return listOfTargets;
}
String nextMfstClass = path.poll();
String bomClass = "bom." + trUtil.getLongShortClazzName(nextMfstClass);
//List<CmsRfcRelation> dependsOnRels = cmRfcMrgProcessor.getFromCIRelationsNakedNoAttrs(startCiId, null, "DependsOn", bomClass);
List<Long> targets = new ArrayList<Long>();
if (dependsOnMap.containsKey(startCiId)) {
if (dependsOnMap.get(startCiId).containsKey(bomClass)) {
targets.addAll(dependsOnMap.get(startCiId).get(bomClass));
}
}
if (path.size() ==0) {
//this should be our target list
for (long toCiId : targets) {
//lets check if this guy is related to the right mfstCi
//TODO this could be not nessesary
//if (cmRfcMrgProcessor.getToCIRelationsNakedNoAttrs(rel.getToCiId(), null, "RealizedAs", nextMfstClass).size() >0) {
listOfTargets.add(toCiId);
//}
}
} else {
for (long toCiId : targets) {
listOfTargets.addAll(getLeafsByPath(toCiId, new LinkedList<String>(path), targetMfstCiId, dependsOnMap));
}
}
return listOfTargets;
}
private List<String> getDpOnPath(long fromId, long endId) {
List<String> pathClasses = new ArrayList<String>();
List<CmsCIRelation> dponRels = cmProcessor.getFromCIRelations(fromId, null, "DependsOn", null);
for (CmsCIRelation dponRel : dponRels) {
if (dponRel.getToCi().getCiId() == endId) {
pathClasses.add(dponRel.getToCi().getCiClassName());
return pathClasses;
} else {
List<String> downClasses = getDpOnPath(dponRel.getToCiId(), endId);
if (downClasses.size() > 0) {
pathClasses.add(dponRel.getToCi().getCiClassName());
pathClasses.addAll(downClasses);
return pathClasses;
}
}
}
return pathClasses;
}
private Map<Long, List<BomRfc>> buildMfstToBomRfcMap(List<BomRfc> boms) {
Map<Long, List<BomRfc>> map = new HashMap<Long, List<BomRfc>>();
for (BomRfc bom : boms) {
if (!map.containsKey(bom.manifestCiId)) {
map.put(bom.manifestCiId, new ArrayList<BomRfc>());
}
map.get(bom.manifestCiId).add(bom);
}
return map;
}
    /**
     * Recursively expands one bom graph node along its manifest DependsOn edges
     * (both directions), creating the bom nodes and links for every deployment
     * "edge" (scaled instance). Handles percent-based scaling (pct_scale /
     * pct_dpmt) and the "converge" relation flag, which collapses all edges onto a
     * single shared node. Returns the newly created nodes.
     *
     * @throws TransistorException on recursion-depth or edge-count limits
     *         (circular dependency / runaway fan-out protection)
     */
    private List<BomRfc> processNode(BomRfc node, Map<String, Integer> namesMap, CmsCIRelation binding, Map<String, List<String>> mfstIdEdge2nodeId, Map<Long,Map<String,List<CmsCIRelation>>> manifestDependsOnRels, int edgeNum, boolean usePercent, int recursionDepth){
        if (recursionDepth >= MAX_RECUSION_DEPTH) {
            String err = "Circular dependency detected, (level - " + recursionDepth + "),\n please check the platform diagram for " + extractPlatformNameFromNsPath(node.mfstCi.getNsPath());
            logger.error(err);
            throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
        }
        if (edgeNum >= MAX_NUM_OF_EDGES) {
            String err = "Max number of edges is reached - " + edgeNum + "\n please check the platform diagram for " + extractPlatformNameFromNsPath(node.mfstCi.getNsPath());
            logger.error(err);
            throw new TransistorException(CmsError.TRANSISTOR_CANNOT_TRAVERSE, err);
        }
        logger.info("working on " + node.ciName + "; recursion depth - " + recursionDepth);
        List<BomRfc> newBoms = new ArrayList<BomRfc>();
        if (node.isProcessed) {
            return newBoms;
        }
        List<CmsCIRelation> mfstFromRels = null;
        List<CmsCIRelation> mfstToRels = null;
        // lazily load and cache the manifest DependsOn relations for this ci
        if (!manifestDependsOnRels.containsKey(node.manifestCiId)) {
            Map<String,List<CmsCIRelation>> rels = new HashMap<String,List<CmsCIRelation>>();
            rels.put("from", cmProcessor.getFromCIRelations(node.manifestCiId, "manifest.DependsOn", null));
            rels.put("to", cmProcessor.getToCIRelations(node.manifestCiId, "manifest.DependsOn", null));
            manifestDependsOnRels.put(node.manifestCiId, rels);
        }
        mfstFromRels = manifestDependsOnRels.get(node.manifestCiId).get("from");
        mfstToRels = manifestDependsOnRels.get(node.manifestCiId).get("to");;
        //logger.info("got " + mfstFromRels.size() + " 'from' relations");
        //logger.info("got " + mfstToRels.size() + " 'to' relations");
        for (CmsCIRelation fromRel : mfstFromRels) {
            int numEdges = 0;
            int percent = 100;
            // scale the "current" instance count by the binding's pct_scale, if set
            int current = Integer.valueOf(fromRel.getAttribute("current").getDfValue());
            if (current >1 && binding.getAttributes().containsKey("pct_scale") && binding.getAttribute("pct_scale") != null) {
                int pctScale = Integer.valueOf(binding.getAttribute("pct_scale").getDjValue());
                current = (int)Math.ceil(current*(pctScale/100.0)) ;
            }
            // pct_dpmt limits how many edges get deployed in this pass
            if (usePercent && fromRel.getAttribute("pct_dpmt") != null) {
                percent = Integer.valueOf(fromRel.getAttribute("pct_dpmt").getDjValue());
                numEdges = (int)Math.floor(current*(percent/100.0)) ;
            } else {
                numEdges = current;
            }
            int edgeNumLocal = edgeNum;
            //special case if the relation marked as converge
            // converge collapses the fan-out: all parents share one target node on edge 1
            if (fromRel.getAttribute("converge") != null
                    && Boolean.valueOf(fromRel.getAttribute("converge").getDfValue())) {
                edgeNumLocal = 1;
                numEdges = 1;
            }
            String key = String.valueOf(fromRel.getToCi().getCiId()) + "-" + edgeNumLocal;
            if (!mfstIdEdge2nodeId.containsKey(key)
                    || numEdges > 1) {
                //for (int i=node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size()+1; i<=numEdges; i++) {
                for (int i=node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size() + 1 + ((edgeNumLocal-1) * numEdges); i<=numEdges + ((edgeNumLocal-1) * numEdges); i++) {
                    int newEdgeNum = (i > edgeNumLocal) ? i : edgeNumLocal;
                    BomRfc newBom = bootstrapNewBom(fromRel.getToCi(), namesMap, binding.getToCiId(), newEdgeNum);
                    BomLink link = new BomLink();
                    link.fromNodeId = node.nodeId;
                    link.fromMfstCiId = node.manifestCiId;
                    link.toNodeId = newBom.nodeId;
                    link.toMfstCiId = newBom.manifestCiId;
                    node.fromLinks.add(link);
                    newBom.toLinks.add(link);
                    newBoms.add(newBom);
                    key = String.valueOf(newBom.manifestCiId)+ "-" + newEdgeNum;
                    if (!mfstIdEdge2nodeId.containsKey(key)) mfstIdEdge2nodeId.put(key, new ArrayList<String>());
                    mfstIdEdge2nodeId.get(key).add(newBom.nodeId);
                    newBoms.addAll(processNode(newBom, namesMap, binding, mfstIdEdge2nodeId, manifestDependsOnRels, newEdgeNum, usePercent, recursionDepth + 1));
                }
            } else {
                // node(s) already exist for this manifest/edge - just wire up missing links
                for (String toNodeId : mfstIdEdge2nodeId.get(key)) {
                    if (node.getExisitngFromLinks(fromRel.getToCi().getCiId()).size() == 0 ) {
                        BomLink link = new BomLink();
                        link.fromNodeId = node.nodeId;
                        link.fromMfstCiId = node.manifestCiId;
                        link.toNodeId = toNodeId;
                        link.toMfstCiId = fromRel.getToCi().getCiId();
                        node.fromLinks.add(link);
                    }
                }
            }
        }
        for (CmsCIRelation toRel : mfstToRels) {
            String key = String.valueOf(toRel.getFromCi().getCiId()) + "-" + edgeNum;
            if (!mfstIdEdge2nodeId.containsKey(key)) {
                mfstIdEdge2nodeId.put(key, new ArrayList<String>());
                if (node.getExisitngToLinks(toRel.getFromCi().getCiId()).size() == 0 ) {
                    BomRfc newBom = bootstrapNewBom(toRel.getFromCi(), namesMap, binding.getToCiId(), edgeNum);
                    BomLink link = new BomLink();
                    link.toNodeId = node.nodeId;
                    link.toMfstCiId = node.manifestCiId;
                    link.fromNodeId = newBom.nodeId;
                    link.fromMfstCiId = newBom.manifestCiId;
                    node.toLinks.add(link);
                    newBom.fromLinks.add(link);
                    newBoms.add(newBom);
                    mfstIdEdge2nodeId.get(String.valueOf(newBom.manifestCiId)+ "-" + edgeNum).add(newBom.nodeId);
                    newBoms.addAll(processNode(newBom, namesMap, binding, mfstIdEdge2nodeId, manifestDependsOnRels, edgeNum, usePercent, recursionDepth + 1));
                }
            } else {
                // upstream node(s) already exist - just wire up missing reverse links
                for (String fromNodeId : mfstIdEdge2nodeId.get(key)) {
                    if (node.getExisitngToLinks(toRel.getFromCi().getCiId()).size() == 0 ) {
                        BomLink link = new BomLink();
                        link.toNodeId = node.nodeId;
                        link.toMfstCiId = node.manifestCiId;
                        link.fromNodeId = fromNodeId;
                        link.fromMfstCiId = toRel.getFromCi().getCiId();
                        node.toLinks.add(link);
                    }
                }
            }
        }
        node.isProcessed = true;
        return newBoms;
    }
private BomRfc bootstrapNewBom(CmsCI ci, Map<String, Integer> namesMap, long bindingId, int edgeNum) {
BomRfc newBom = new BomRfc();
newBom.manifestCiId = ci.getCiId();
newBom.mfstCi = ci;
newBom.ciName = getName(ci.getCiName(), namesMap, bindingId, edgeNum);
newBom.nodeId = newBom.manifestCiId + newBom.ciName;
return newBom;
}
private String getName(String base, Map<String, Integer> namesMap, long bindingId, int edgeNum) {
return base + "-" + bindingId + "-" + edgeNum;
}
private void applyCiToRfc(CmsRfcCI newRfc, BomRfc bom, Map<String, CmsClazzAttribute> mdAttrs, boolean checkExpression) {
newRfc.setCiName(bom.ciName);
newRfc.setComments(bom.mfstCi.getComments());
for (CmsCIAttribute mfstAttr : bom.mfstCi.getAttributes().values()) {
if (mdAttrs.containsKey(mfstAttr.getAttributeName())) {
if (mfstAttr.getDfValue() != null) {
if (newRfc.getAttribute(mfstAttr.getAttributeName()) != null) {
newRfc.getAttribute(mfstAttr.getAttributeName()).setNewValue(mfstAttr.getDfValue());
newRfc.getAttribute(mfstAttr.getAttributeName()).setComments(mfstAttr.getComments());
} else {
CmsRfcAttribute rfcAttr = new CmsRfcAttribute();
rfcAttr.setAttributeId(mdAttrs.get(mfstAttr.getAttributeName()).getAttributeId());
rfcAttr.setAttributeName(mfstAttr.getAttributeName());
rfcAttr.setNewValue(mfstAttr.getDfValue());
newRfc.addAttribute(rfcAttr);
}
}
}
}
}
private void setCiId(CmsRfcCI rfc, CmsRfcCI existingRfc, CmsCI existingBomCi) {
if (existingRfc != null) {
rfc.setCiId(existingRfc.getCiId());
rfc.setRfcId(existingRfc.getRfcId());
rfc.setReleaseId(existingRfc.getReleaseId());
} else if (existingBomCi != null) {
rfc.setCiId(existingBomCi.getCiId());
rfc.setCiState(existingBomCi.getCiState());
}
}
private CmsRfcRelation bootstrapRelationRfc(long fromCiId, long toCiId, String relName, String nsPath, ExistingRels existingRels) {
CmsRfcRelation newRfc = new CmsRfcRelation();
newRfc.setNsPath(nsPath);
CmsRelation targetRelation = mdProcessor.getRelation(relName);
newRfc.setRelationId(targetRelation.getRelationId());
newRfc.setRelationName(targetRelation.getRelationName());
//bootstrap the default values from Class definition
for (CmsRelationAttribute relAttr : targetRelation.getMdAttributes()) {
if (relAttr.getDefaultValue() != null) {
CmsRfcAttribute rfcAttr = new CmsRfcAttribute();
rfcAttr.setAttributeId(relAttr.getAttributeId());
rfcAttr.setAttributeName(relAttr.getAttributeName());
rfcAttr.setNewValue(relAttr.getDefaultValue());
newRfc.addAttribute(rfcAttr);
}
}
newRfc.setFromCiId(fromCiId);
newRfc.setToCiId(toCiId);
newRfc.setRelationGoid(String.valueOf(newRfc.getFromCiId()) + '-' + String.valueOf(newRfc.getRelationId()) + '-' +String.valueOf(newRfc.getToCiId()));
setCiRelationId(newRfc, existingRels.getOpenRelRfc(relName, fromCiId, toCiId), existingRels.getExistingRel(relName, fromCiId, toCiId));
return newRfc;
}
private CmsRfcRelation bootstrapRelationRfcWithAttributes(long fromCiId, long toCiId, String relName, String nsPath, Map<String,String> attrs, ExistingRels existingRels) {
CmsRfcRelation newRfc = new CmsRfcRelation();
newRfc.setNsPath(nsPath);
CmsRelation targetRelation = mdProcessor.getRelation(relName);
newRfc.setRelationId(targetRelation.getRelationId());
newRfc.setRelationName(targetRelation.getRelationName());
//bootstrap the default values from Class definition
for (CmsRelationAttribute relAttr : targetRelation.getMdAttributes()) {
if (relAttr.getDefaultValue() != null || attrs.containsKey(relAttr.getAttributeName())) {
CmsRfcAttribute rfcAttr = new CmsRfcAttribute();
rfcAttr.setAttributeId(relAttr.getAttributeId());
rfcAttr.setAttributeName(relAttr.getAttributeName());
if (attrs.containsKey(relAttr.getAttributeName())) {
rfcAttr.setNewValue(attrs.get(relAttr.getAttributeName()));
} else if (relAttr.getDefaultValue() != null){
rfcAttr.setNewValue(relAttr.getDefaultValue());
}
newRfc.addAttribute(rfcAttr);
}
}
newRfc.setFromCiId(fromCiId);
newRfc.setToCiId(toCiId);
setCiRelationId(newRfc, existingRels.getOpenRelRfc(relName, fromCiId, toCiId), existingRels.getExistingRel(relName, fromCiId, toCiId));
return newRfc;
}
private void setCiRelationId(CmsRfcRelation rfc, CmsRfcRelation existingRfc, CmsCIRelation existingRel) {
if (existingRfc != null) {
rfc.setCiRelationId(existingRfc.getCiRelationId());
rfc.setRfcId(existingRfc.getRfcId());
rfc.setReleaseId(existingRfc.getReleaseId());
} else if (existingRel != null){
rfc.setCiRelationId(existingRel.getCiRelationId());
}
}
private String extractPlatformNameFromNsPath(String ns) {
String[] nsParts = ns.split("/");
return nsParts[nsParts.length-2] + "(" + nsParts[nsParts.length-1] + ")";
}
    /**
     * In-memory node of the bom dependency graph built for a single platform/cloud
     * binding: tracks the manifest ci it realizes, the generated bom ci name, the
     * computed deployment exec order and its DependsOn links in both directions.
     */
    private class BomRfc {
        // id of the manifest ci this bom node realizes
        long manifestCiId;
        // the manifest ci itself
        CmsCI mfstCi;
        // deployment execution order computed by processOrder()
        int execOrder=0;
        // generated bom ci name ("<manifestName>-<bindingId>-<edgeNum>")
        String ciName;
        // graph node id: manifestCiId concatenated with ciName
        String nodeId;
        // the persisted rfc for this node, once created
        CmsRfcCI rfc;
        // guards against re-expanding the node in processNode()
        boolean isProcessed = false;
        // outgoing DependsOn links (this node depends on link targets)
        List<BomLink> fromLinks = new ArrayList<BomLink>();
        // incoming DependsOn links (link sources depend on this node)
        List<BomLink> toLinks = new ArrayList<BomLink>();
        // Returns the outgoing links that point at the given manifest ci.
        public List<BomLink> getExisitngFromLinks(long toMfstCiId) {
            List<BomLink> links = new ArrayList<BomLink>();
            for (BomLink link : fromLinks) {
                if (link.toMfstCiId == toMfstCiId) {
                    links.add(link);
                }
            }
            return links;
        }
        // Returns the incoming links that originate from the given manifest ci.
        public List<BomLink> getExisitngToLinks(long fromMfstCiId) {
            List<BomLink> links = new ArrayList<BomLink>();
            for (BomLink link : toLinks) {
                if (link.fromMfstCiId == fromMfstCiId) {
                    links.add(link);
                }
            }
            return links;
        }
        // Returns the incoming link from the given graph node id, or null if absent.
        public BomLink getExisitngToLinks(String fromNodeId) {
            for (BomLink link : toLinks) {
                if (link.fromNodeId.equals(fromNodeId)) {
                    return link;
                }
            }
            return null;
        }
    }
    /**
     * Directed DependsOn edge between two bom graph nodes; carries both the graph
     * node ids and the underlying manifest ci ids of each endpoint.
     */
    private class BomLink {
        // graph node id of the dependent (source) node
        String fromNodeId;
        // manifest ci id of the dependent (source) node
        long fromMfstCiId;
        // manifest ci id of the dependency (target) node
        long toMfstCiId;
        // graph node id of the dependency (target) node
        String toNodeId;
    }
    /**
     * Snapshot of the relations already present in a namespace: both committed
     * relations and open relation rfcs, indexed as
     * relationName -> ("fromCiId:toCiId" -> relation) for O(1) lookups during
     * bom generation.
     */
    private class ExistingRels {
        // relationName -> (fromCiId:toCiId -> committed relation)
        private Map<String, Map<String,CmsCIRelation>> existingRels;
        // relationName -> (fromCiId:toCiId -> open relation rfc)
        private Map<String, Map<String,CmsRfcRelation>> openRelRfcs;
        // Loads both indexes for the given namespace path.
        ExistingRels(String nsPath) {
            this.existingRels = getExistingRelations(nsPath);
            this.openRelRfcs = getOpenRelationsRfcs(nsPath);
        }
        // Returns the committed relation between the two cis, or null if absent.
        protected CmsCIRelation getExistingRel(String relName, long fromCiId, long toCiId) {
            if (existingRels.containsKey(relName)) {
                return existingRels.get(relName).get(fromCiId + ":" + toCiId);
            }
            return null;
        }
        // Returns all committed relations of the given name (empty list if none).
        protected Collection<CmsCIRelation> getExistingRel(String relName) {
            if (existingRels.containsKey(relName)) {
                return existingRels.get(relName).values();
            }
            return new ArrayList<CmsCIRelation>(0);
        }
        /*
        protected Collection<CmsRfcRelation> getExistingRelRfc(String relName) {
            if (openRelRfcs.containsKey(relName)) {
                return openRelRfcs.get(relName).values();
            }
            return new ArrayList<CmsRfcRelation>(0);
        }
        */
        // Registers a newly created relation rfc in the open-rfc index.
        protected void addRelRfc(CmsRfcRelation relRfc) {
            String localKey = relRfc.getFromCiId() + ":" + relRfc.getToCiId();
            if (!openRelRfcs.containsKey(relRfc.getRelationName())) {
                openRelRfcs.put(relRfc.getRelationName(), new HashMap<String,CmsRfcRelation>());
            }
            openRelRfcs.get(relRfc.getRelationName()).put(localKey, relRfc);
        }
        // Returns the open relation rfc between the two cis, or null if absent.
        protected CmsRfcRelation getOpenRelRfc(String relName, long fromCiId, long toCiId) {
            if (openRelRfcs.containsKey(relName)) {
                return openRelRfcs.get(relName).get(fromCiId + ":" + toCiId);
            }
            return null;
        }
    }
}
|
fixing an edge case with deployment rfc generation involving converge relation
|
src/main/java/com/oneops/transistor/service/BomRfcBulkProcessor.java
|
fixing an edge case with deployment rfc generation involving converge relation
|
|
Java
|
apache-2.0
|
b4c3542b89040c62543a1c380352cc98cd75d93a
| 0
|
DwayneJengSage/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,alxdarksage/BridgePF,Sage-Bionetworks/BridgePF,Sage-Bionetworks/BridgePF,DwayneJengSage/BridgePF,DwayneJengSage/BridgePF,alxdarksage/BridgePF
|
package org.sagebionetworks.bridge.upload;
import javax.annotation.Nonnull;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.sagebionetworks.bridge.dao.HealthDataDao;
import org.sagebionetworks.bridge.json.BridgeObjectMapper;
import org.sagebionetworks.bridge.json.DateUtils;
import org.sagebionetworks.bridge.models.healthdata.HealthDataRecordBuilder;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.upload.Upload;
import org.sagebionetworks.bridge.models.upload.UploadFieldDefinition;
import org.sagebionetworks.bridge.models.upload.UploadFieldType;
import org.sagebionetworks.bridge.models.upload.UploadSchema;
import org.sagebionetworks.bridge.services.UploadSchemaService;
// TODO: Currently, all apps are iOS-based. However, when we start having non-iOS apps, we'll need to restructure this
// so that it only runs in the iOS context.
@Component
public class IosSchemaValidationHandler implements UploadValidationHandler {
private static final Logger logger = LoggerFactory.getLogger(IosSchemaValidationHandler.class);
// Field types whose values are stored as attachments (uploaded separately) rather than inline.
private static final Set<UploadFieldType> ATTACHMENT_TYPE_SET = EnumSet.of(UploadFieldType.ATTACHMENT_BLOB,
        UploadFieldType.ATTACHMENT_CSV, UploadFieldType.ATTACHMENT_JSON_BLOB,
        UploadFieldType.ATTACHMENT_JSON_TABLE);
private static final String FILENAME_INFO_JSON = "info.json";
// Matches "-" followed by 8+ digits; used to strip embedded timestamps out of filenames.
private static final Pattern FILENAME_TIMESTAMP_PATTERN = Pattern.compile("-\\d{8,}");
// JSON keys used in info.json and in survey answer files.
private static final String KEY_ANSWERS = "answers";
private static final String KEY_FILENAME = "filename";
private static final String KEY_FILES = "files";
private static final String KEY_IDENTIFIER = "identifier";
private static final String KEY_ITEM = "item";
private static final String KEY_TASK_RUN = "taskRun";
private static final String KEY_TASK_RUN_ID = "taskRunId";
private static final String KEY_TIMESTAMP = "timestamp";
// Schema id used for all iOS survey uploads.
private static final String SCHEMA_IOS_SURVEY = "ios-survey";
// Note: some surveys have both questionType and questionTypeName. Some surveys only have questionType as a string.
// To maximize compatibility, we only check for questionType.
private static final Set<String> SURVEY_KEY_SET = ImmutableSet.of("endDate", "item", "questionType", "startDate");
private HealthDataDao healthDataDao;
private UploadSchemaService uploadSchemaService;
/** DAO used to obtain fresh HealthDataRecordBuilder instances. Spring-injected. */
@Autowired
public void setHealthDataDao(HealthDataDao healthDataDao) {
    this.healthDataDao = healthDataDao;
}
/** Service used to look up upload schemas for a study. Spring-injected. */
@Autowired
public void setUploadSchemaService(UploadSchemaService uploadSchemaService) {
    this.uploadSchemaService = uploadSchemaService;
}
// iOS data comes from a third party, and we have no control over the data format. So our data validation needs to
// be as flexible as possible. Which means our error handling strategy is to write a warning to the logs, and then
// attempt to recover. This will, however, cause cascading errors further down the validation chain.
@Override
public void handle(@Nonnull UploadValidationContext context)
        throws UploadValidationException {
    Map<String, JsonNode> jsonDataMap = context.getJsonDataMap();
    Map<String, byte[]> unzippedDataMap = context.getUnzippedDataMap();
    Upload upload = context.getUpload();
    String uploadId = upload.getUploadId();
    Study study = context.getStudy();
    String studyId = study.getIdentifier();
    // Add empty record builder and attachment map to the context. We'll fill these in as we need them.
    HealthDataRecordBuilder recordBuilder = healthDataDao.getRecordBuilder();
    context.setHealthDataRecordBuilder(recordBuilder);
    Map<String, byte[]> attachmentMap = new HashMap<>();
    context.setAttachmentsByFieldName(attachmentMap);
    // health data records fields
    recordBuilder.withHealthCode(upload.getHealthCode());
    recordBuilder.withStudyId(studyId);
    recordBuilder.withUploadDate(upload.getUploadDate());
    recordBuilder.withUploadId(uploadId);
    // create an empty object node in our record builder, which we'll fill in as we go
    ObjectNode dataMap = BridgeObjectMapper.get().createObjectNode();
    recordBuilder.withData(dataMap);
    // Use info.json verbatim as the metadata.
    JsonNode infoJson = getInfoJsonFile(context, uploadId, jsonDataMap);
    recordBuilder.withMetadata(infoJson);
    // extract other data from info.json
    JsonNode taskRunNode = infoJson.get(KEY_TASK_RUN);
    String taskRunId = taskRunNode != null ? taskRunNode.textValue() : null;
    JsonNode itemNode = infoJson.get(KEY_ITEM);
    if (itemNode == null) {
        // fall back to "identifier"
        itemNode = infoJson.get(KEY_IDENTIFIER);
    }
    String item = itemNode != null ? itemNode.textValue() : null;
    // validate and normalize filenames
    validateInfoJsonFileList(context, uploadId, jsonDataMap, unzippedDataMap, infoJson, recordBuilder);
    removeTimestampsFromFilenames(jsonDataMap);
    removeTimestampsFromFilenames(unzippedDataMap);
    // get schemas
    // TODO: cache this
    List<UploadSchema> schemaList = uploadSchemaService.getUploadSchemasForStudy(study);
    UploadSchema surveySchema = uploadSchemaService.getUploadSchema(study, SCHEMA_IOS_SURVEY);
    // Dispatch: non-JSON payloads are matched purely by info.json "item"; JSON payloads are
    // further classified as survey vs. schema-matched JSON data.
    if (!unzippedDataMap.isEmpty()) {
        handleNonJsonData(context, uploadId, jsonDataMap, unzippedDataMap, item, schemaList, recordBuilder,
                attachmentMap);
    } else {
        // This means our data is in JSON format, so we can look inside it to figure out what it is.
        if (isSurvey(jsonDataMap)) {
            handleSurvey(context, uploadId, jsonDataMap, item, taskRunId, surveySchema, recordBuilder,
                    attachmentMap, dataMap);
        } else {
            handleJsonData(context, uploadId, jsonDataMap, schemaList, recordBuilder, attachmentMap, dataMap);
        }
    }
}
/**
 * Fetches info.json from the parsed JSON map. When absent, logs a warning, substitutes an
 * empty object node, and inserts it back into the map so downstream logic can rely on its
 * presence.
 */
private static JsonNode getInfoJsonFile(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap) {
    JsonNode infoNode = jsonDataMap.get(FILENAME_INFO_JSON);
    if (infoNode != null) {
        return infoNode;
    }
    // Recover: warn, then register an empty object under the info.json key.
    addMessageAndWarn(context, String.format("upload ID %s does not contain info.json file", uploadId));
    infoNode = BridgeObjectMapper.get().createObjectNode();
    jsonDataMap.put(FILENAME_INFO_JSON, infoNode);
    return infoNode;
}
/**
 * Cross-checks the archive's actual file set against the file list declared in info.json
 * (warning on any discrepancy), and computes the record's createdOn as the latest per-file
 * timestamp, falling back to the current time when none is parseable.
 */
private static void validateInfoJsonFileList(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, Map<String, byte[]> unzippedDataMap, JsonNode infoJson,
        HealthDataRecordBuilder recordBuilder) {
    // Make sure all files specified by info.json are accounted for.
    // Because ParseJsonHandler moves files from unzippedDataMap to jsonDataMap, there is no overlap between the
    // two maps.
    Set<String> fileNameSet = new HashSet<>();
    fileNameSet.addAll(jsonDataMap.keySet());
    fileNameSet.addAll(unzippedDataMap.keySet());
    // fileList.size() should be exactly 1 less than fileNameSet.size(), because fileList.size() doesn't include
    // info.json.
    JsonNode fileList = infoJson.get(KEY_FILES);
    if (fileList == null) {
        // Recover by replacing this with an empty list
        addMessageAndWarn(context,
                String.format("upload ID %s info.json does not contain file list", uploadId));
        fileList = BridgeObjectMapper.get().createArrayNode();
    } else if (fileList.size() == 0) {
        addMessageAndWarn(context, String.format("upload ID %s info.json contains empty file list", uploadId));
    } else if (fileList.size() != fileNameSet.size() - 1) {
        addMessageAndWarn(context, String.format("upload ID %s info.json reports %d files, but we found %d files",
                uploadId, fileList.size(), fileNameSet.size() - 1));
    }
    DateTime createdOn = null;
    Map<String, JsonNode> infoJsonFilesByName = new HashMap<>();
    for (JsonNode oneFileJson : fileList) {
        // validate filename
        JsonNode filenameNode = oneFileJson.get(KEY_FILENAME);
        String filename = null;
        if (filenameNode == null) {
            addMessageAndWarn(context, String.format("upload ID %s info.json contains file with no name",
                    uploadId));
        } else {
            filename = filenameNode.textValue();
            if (!fileNameSet.contains(filename)) {
                addMessageAndWarn(context, String.format(
                        "upload ID %s info.json contains filename %s, not found in the archive", uploadId,
                        filename));
            }
            infoJsonFilesByName.put(filename, oneFileJson);
        }
        // Calculate createdOn timestamp. Each file in the file list has its own timestamp. Canonical createdOn is
        // the latest of these timestamps.
        JsonNode timestampNode = oneFileJson.get(KEY_TIMESTAMP);
        if (timestampNode == null) {
            addMessageAndWarn(context, String.format("upload ID %s filename %s has no timestamp", uploadId,
                    filename));
        } else {
            DateTime timestamp = parseTimestampHelper(context, uploadId, filename, timestampNode.textValue());
            // BUGFIX: parseTimestampHelper() returns null for blank/unparseable timestamps;
            // the old unguarded timestamp.isAfter(createdOn) threw an NPE in that case.
            // Skip unparseable timestamps instead.
            if (timestamp != null && (createdOn == null || timestamp.isAfter(createdOn))) {
                createdOn = timestamp;
            }
        }
    }
    // sanity check filenames with the info.json file list
    for (String oneFilename : fileNameSet) {
        if (!oneFilename.equals(FILENAME_INFO_JSON) && !infoJsonFilesByName.containsKey(oneFilename)) {
            addMessageAndWarn(context, String.format(
                    "upload ID %s contains filename %s not found in info.json", uploadId, oneFilename));
        }
    }
    if (createdOn == null) {
        // Recover by using current time.
        addMessageAndWarn(context, String.format("upload ID %s has no timestamps, using current time", uploadId));
        createdOn = DateUtils.getCurrentDateTime();
    }
    recordBuilder.withCreatedOn(createdOn.getMillis());
}
/**
 * Strips embedded "-&lt;8+ digits&gt;" timestamp runs out of filename keys so filenames can be
 * used as stable key prefixes. Iterates over a snapshot of the key set so the map itself can
 * be mutated without ConcurrentModificationException. Filenames are assumed to remain unique
 * after stripping.
 */
private static <T> void removeTimestampsFromFilenames(Map<String, T> fileMap) {
    for (String originalName : ImmutableSet.copyOf(fileMap.keySet())) {
        Matcher timestampMatcher = FILENAME_TIMESTAMP_PATTERN.matcher(originalName);
        if (!timestampMatcher.find()) {
            continue;
        }
        // Re-key the entry under its timestamp-free name.
        String strippedName = timestampMatcher.replaceAll("");
        fileMap.put(strippedName, fileMap.remove(originalName));
    }
}
/**
 * Handles uploads containing non-JSON files. The schema is chosen solely by matching
 * info.json's "item" against schema names (latest revision wins); each attachment-typed
 * field is then filled from the file with the same name. Throws UploadValidationException
 * when no schema can be identified or the identified schema has no fields.
 */
private static void handleNonJsonData(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, Map<String, byte[]> unzippedDataMap, String infoJsonItem,
        List<UploadSchema> schemaList, HealthDataRecordBuilder recordBuilder, Map<String, byte[]> attachmentMap)
        throws UploadValidationException {
    // Attempting to parse into the non-JSON data is an exercise in madness. Our best strategy here is to
    // match the "item" field in info.json with one of the schema names, and pick the one with the latest
    // revision.
    if (StringUtils.isBlank(infoJsonItem)) {
        // No "item" field means we have no way of identifying this. Game over.
        throw new UploadValidationException(
                "info.json in non-JSON upload has blank \"item\" field to identify the schema with.");
    }
    // Try to find the schema.
    UploadSchema latestSchema = null;
    for (UploadSchema oneSchema : schemaList) {
        if (oneSchema.getName().equals(infoJsonItem)) {
            if (latestSchema == null || oneSchema.getRevision() > latestSchema.getRevision()) {
                latestSchema = oneSchema;
            }
        }
    }
    if (latestSchema == null) {
        // No schema, no health data record. Game over.
        throw new UploadValidationException(String.format("No schema found for item %s", infoJsonItem));
    }
    // We found the schema.
    String schemaId = latestSchema.getSchemaId();
    int schemaRev = latestSchema.getRevision();
    recordBuilder.withSchemaId(schemaId);
    recordBuilder.withSchemaRevision(schemaRev);
    // Schema should have a field that's in ATTACHMENT_TYPE_SET, to store the attachment ref in.
    List<UploadFieldDefinition> fieldDefList = latestSchema.getFieldDefinitions();
    if (fieldDefList.isEmpty()) {
        // No fields at all? Game over.
        throw new UploadValidationException(String.format("Identified schema ID %s rev %d has no fields",
                schemaId, schemaRev));
    }
    // Find fields of type in ATTACHMENT_TYPE_SET to store the attachment ref in. The field name is the same as the
    // filename.
    Set<String> fieldNameSet = new HashSet<>();
    for (UploadFieldDefinition oneFieldDef : fieldDefList) {
        String fieldName = oneFieldDef.getName();
        fieldNameSet.add(fieldName);
        if (ATTACHMENT_TYPE_SET.contains(oneFieldDef.getType())) {
            byte[] data = unzippedDataMap.get(fieldName);
            if (data != null) {
                // Write this to the attachment map. UploadArtifactsHandler will take care of the rest.
                attachmentMap.put(fieldName, data);
            } else {
                // Fall back to the JSON map: the matching file may have been parsed as JSON.
                JsonNode jsonData = jsonDataMap.get(fieldName);
                if (jsonData != null) {
                    // Convert to raw bytes, then add to attachment map.
                    try {
                        attachmentMap.put(fieldName, BridgeObjectMapper.get().writeValueAsBytes(jsonData));
                    } catch (JsonProcessingException ex) {
                        addMessageAndWarn(context, String.format(
                                "Upload ID %s attachment field %s could not be converted to JSON: %s", uploadId,
                                fieldName, ex.getMessage()));
                    }
                } else if (oneFieldDef.isRequired()) {
                    addMessageAndWarn(context, String.format(
                            "Upload ID %s with schema ID %s has required field %s with no corresponding file",
                            uploadId,
                            schemaId, fieldName));
                }
            }
        } else {
            // Non-JSON uploads should only map onto attachment fields; warn otherwise.
            addMessageAndWarn(context, String.format("Upload ID %s with schema ID %s has non-attachment field %s",
                    uploadId, schemaId, fieldName));
        }
    }
    // validate file names against field names
    for (String oneFilename : unzippedDataMap.keySet()) {
        if (!fieldNameSet.contains(oneFilename)) {
            addMessageAndWarn(context, String.format(
                    "Upload ID %s with schema ID %s has file %s with no corresponding field", uploadId, schemaId,
                    oneFilename));
        }
    }
    for (String oneJsonFilename : jsonDataMap.keySet()) {
        if (oneJsonFilename.equals(FILENAME_INFO_JSON)) {
            // skip info.json
            continue;
        }
        if (!fieldNameSet.contains(oneJsonFilename)) {
            addMessageAndWarn(context, String.format(
                    "Upload ID %s with schema ID %s has JSON file %s with no corresponding field", uploadId,
                    schemaId, oneJsonFilename));
        }
    }
}
/**
 * Heuristically decides whether this upload is a survey: every JSON file other than
 * info.json must contain all of the keys in SURVEY_KEY_SET.
 */
private static boolean isSurvey(Map<String, JsonNode> jsonDataMap) {
    // Degenerate case: There's only one file and it's info.json. This is vacuously not a survey.
    if (jsonDataMap.size() == 1 && jsonDataMap.containsKey(FILENAME_INFO_JSON)) {
        return false;
    }
    // Check if it's a survey. We can tell if it's a survey by looking at one of the JSON files (other than
    // info.json) and looking for specific survey keys, as listed in SURVEY_KEY_SET.
    for (Map.Entry<String, JsonNode> oneJsonFile : jsonDataMap.entrySet()) {
        if (oneJsonFile.getKey().equals(FILENAME_INFO_JSON)) {
            // This IS info.json; skip it.
            continue;
        }
        // There may be fields beyond what's specified in SURVEY_KEY_SET. This is normal, since every question
        // type has its own special fields.
        JsonNode oneJsonFileNode = oneJsonFile.getValue();
        Set<String> fieldNameSet = ImmutableSet.copyOf(oneJsonFileNode.fieldNames());
        if (!fieldNameSet.containsAll(SURVEY_KEY_SET)) {
            // This JSON file doesn't have the required keys for surveys. We know it's not a survey, so
            // short-circuit and return false.
            return false;
        }
    }
    // If we make it this far, that means all files we saw are surveys.
    return true;
}
/**
 * Handles a survey upload: aggregates all per-question JSON files into one
 * ATTACHMENT_JSON_TABLE stored under the "answers" attachment, records item and
 * taskRunId in the data map, and tags the record with the ios-survey schema.
 */
private static void handleSurvey(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, String infoJsonItem, String taskRunId, UploadSchema surveySchema,
        HealthDataRecordBuilder recordBuilder, Map<String, byte[]> attachmentMap, ObjectNode dataMap) {
    // Currently, the 3rd party iOS apps don't tag surveys or questions with guids. (In fact, some of the
    // surveys aren't even in the Surveys table yet.) So we can't store survey answers in the Survey
    // Responses table. Instead, let's take all the answers, create a big ATTACHMENT_JSON_TABLE out of
    // them.
    ArrayNode answerArray = BridgeObjectMapper.get().createArrayNode();
    for (Map.Entry<String, JsonNode> oneJsonFile : jsonDataMap.entrySet()) {
        if (oneJsonFile.getKey().equals(FILENAME_INFO_JSON)) {
            // This IS info.json; skip it.
            continue;
        }
        // add the JSON directly to our object. We're making a table.
        answerArray.add(oneJsonFile.getValue());
    }
    // answers should be treated as an attachment
    try {
        attachmentMap.put(KEY_ANSWERS, BridgeObjectMapper.get().writeValueAsBytes(answerArray));
    } catch (JsonProcessingException ex) {
        addMessageAndWarn(context, String.format(
                "Upload ID %s could not convert survey answers to JSON: %s", uploadId, ex.getMessage()));
    }
    // also, add item and taskRun to dataMap
    dataMap.put(KEY_ITEM, infoJsonItem);
    dataMap.put(KEY_TASK_RUN_ID, taskRunId);
    // get the survey schema
    recordBuilder.withSchemaId(surveySchema.getSchemaId());
    recordBuilder.withSchemaRevision(surveySchema.getRevision());
}
/**
 * Handles generic JSON uploads: flattens all JSON files (except info.json) into a single
 * field map, selects the matching schema by field structure, and copies the fields into the
 * health data record. Throws UploadValidationException when no schema matches.
 */
private static void handleJsonData(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, List<UploadSchema> schemaList, HealthDataRecordBuilder recordBuilder,
        Map<String, byte[]> attachmentMap, ObjectNode dataMap) throws UploadValidationException {
    // JSON data may contain more than one JSON file. However, Health Data Records stores a single map.
    // Flatten all the JSON maps together (other than info.json).
    Map<String, JsonNode> dataFieldMap = flattenJsonDataMap(jsonDataMap);
    Set<String> keySet = dataFieldMap.keySet();
    // select schema
    UploadSchema schema = selectSchema(context, uploadId, dataFieldMap, schemaList);
    if (schema == null) {
        // No schema, no health data record. Game over.
        throw new UploadValidationException(String.format("No schema found for keys (%s)",
                Joiner.on(", ").join(keySet)));
    }
    // We found the schema.
    recordBuilder.withSchemaId(schema.getSchemaId());
    recordBuilder.withSchemaRevision(schema.getRevision());
    // copy fields to health data
    copyJsonDataToHealthData(context, uploadId, dataFieldMap, schema, dataMap, attachmentMap);
}
/**
 * Flattens the per-file JSON maps (minus info.json) into a single map whose keys are
 * "&lt;filename&gt;.&lt;fieldName&gt;", so identical field names from different files stay distinct.
 */
private static Map<String, JsonNode> flattenJsonDataMap(Map<String, JsonNode> jsonDataMap) {
    Map<String, JsonNode> dataFieldMap = new HashMap<>();
    for (Map.Entry<String, JsonNode> oneJsonFile : jsonDataMap.entrySet()) {
        String filename = oneJsonFile.getKey();
        if (filename.equals(FILENAME_INFO_JSON)) {
            // This IS info.json; skip it.
            continue;
        }
        JsonNode oneJsonFileNode = oneJsonFile.getValue();
        Iterator<String> fieldNameIter = oneJsonFileNode.fieldNames();
        while (fieldNameIter.hasNext()) {
            // Prefix the field name with the file name, so duplicate field names across files
            // get disambiguated.
            String oneFieldName = fieldNameIter.next();
            dataFieldMap.put(filename + "." + oneFieldName, oneJsonFileNode.get(oneFieldName));
        }
    }
    return dataFieldMap;
}
/**
 * Selects the schema whose field definitions structurally match the flattened data map,
 * preferring the highest revision among matches. Returns null when nothing matches.
 * NOTE(review): dataFieldMap keys are "&lt;filename&gt;.&lt;field&gt;" (see flattenJsonDataMap),
 * so schema field names are implicitly assumed to use the same prefixed form — TODO confirm.
 */
private static UploadSchema selectSchema(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> dataFieldMap, List<UploadSchema> schemaList) {
    // cross-ref our data map with our schema list to see which schema matches
    UploadSchema latestSchema = null;
    for (UploadSchema oneSchema : schemaList) {
        Set<String> schemaKeySet = new HashSet<>();
        // strategy: assume the schema matches, unless we find a field that doesn't match
        boolean isMatch = true;
        for (UploadFieldDefinition oneFieldDef : oneSchema.getFieldDefinitions()) {
            String fieldName = oneFieldDef.getName();
            schemaKeySet.add(fieldName);
            JsonNode fieldValue = dataFieldMap.get(fieldName);
            isMatch = validateField(context, uploadId, fieldValue, oneFieldDef);
            if (!isMatch) {
                // we already know it's not a match, so we can short-circuit
                break;
            }
        }
        if (isMatch && !schemaKeySet.containsAll(dataFieldMap.keySet())) {
            // There are keys in the JSON that aren't present in the schema. This may refer to a different
            // revision of the schema, or to a different schema entirely. Mark it as not a match.
            // (Only check if we haven't already flagged it as non-match. Otherwise, this check may be
            // meaningless.)
            isMatch = false;
        }
        if (isMatch) {
            // If we have more than one match, choose the one with the highest revision, as that's the one
            // that's newest. (This assumes that while schema revisions may be the same, entirely different
            // schemas will be different.)
            if (latestSchema == null || oneSchema.getRevision() > latestSchema.getRevision()) {
                latestSchema = oneSchema;
            }
        }
    }
    return latestSchema;
}
/**
 * Checks whether a single JSON value is compatible with the given field definition.
 * A null value matches only when the field is optional. Used by selectSchema to decide
 * schema matches, so "false" means "this schema doesn't fit", not "hard error".
 */
private static boolean validateField(UploadValidationContext context, String uploadId, JsonNode fieldValue,
        UploadFieldDefinition fieldDef) {
    if (fieldValue == null) {
        // if field is not required, we're fine
        return !fieldDef.isRequired();
    } else {
        switch (fieldDef.getType()) {
            case ATTACHMENT_BLOB:
            case ATTACHMENT_CSV:
                // Attachment blob and csv expect non-JSON data, and we don't mix JSON with
                // non-JSON data, so this is not a match.
                return false;
            case ATTACHMENT_JSON_BLOB:
            case INLINE_JSON_BLOB:
                // JSON blobs are always JSON blobs. We don't need to do any special validation.
                return true;
            case ATTACHMENT_JSON_TABLE:
                // Basic sanity check: the outermost layer of the blob should be an array.
                if (!fieldValue.isArray()) {
                    return false;
                }
                // Basic sanity check 2: The first element of the array is an object node.
                JsonNode firstRow = fieldValue.get(0);
                return (firstRow != null && firstRow.isObject());
            case BOOLEAN:
                return fieldValue.isBoolean();
            case CALENDAR_DATE:
                // We expect a string. Also, the string should be parseable by Joda LocalDate.
                if (!fieldValue.isTextual()) {
                    return false;
                }
                try {
                    // DateUtils calls through to Joda parseLocalDate(), which is documented as
                    // never returning null. So we don't need to null check here.
                    DateUtils.parseCalendarDate(fieldValue.textValue());
                    return true;
                } catch (RuntimeException ex) {
                    return false;
                }
            case FLOAT:
                // includes floats, doubles, and decimals
                return fieldValue.isFloatingPointNumber();
            case INT:
                // includes ints, longs, and big ints
                return fieldValue.isIntegralNumber();
            case STRING:
                return fieldValue.isTextual();
            case TIMESTAMP:
                // either it's a string in ISO format, or it's a long in epoch milliseconds
                if (fieldValue.isTextual()) {
                    try {
                        DateTime dateTime = parseTimestampHelper(context, uploadId, null,
                                fieldValue.textValue());
                        return (dateTime != null);
                    } catch (RuntimeException ex) {
                        return false;
                    }
                } else if (fieldValue.isIntegralNumber()) {
                    try {
                        new DateTime(fieldValue.longValue());
                        return true;
                    } catch (RuntimeException ex) {
                        return false;
                    }
                } else {
                    return false;
                }
            default:
                // This should never happen, but just in case we add a new field to UploadFieldType
                // but forget to update this switch.
                return false;
        }
    }
}
/**
 * Copies the flattened JSON values into the record per the schema: attachment-typed fields
 * are serialized into the attachment map; all other fields go directly into the data map.
 */
private static void copyJsonDataToHealthData(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> dataFieldMap, UploadSchema schema, ObjectNode dataMap,
        Map<String, byte[]> attachmentMap) {
    for (UploadFieldDefinition fieldDef : schema.getFieldDefinitions()) {
        String name = fieldDef.getName();
        JsonNode value = dataFieldMap.get(name);
        if (!ATTACHMENT_TYPE_SET.contains(fieldDef.getType())) {
            // Inline field: store directly on the record's data node.
            dataMap.set(name, value);
            continue;
        }
        // Attachment field: serialize the JSON value; UploadArtifactsHandler uploads it later.
        try {
            byte[] serialized = BridgeObjectMapper.get().writeValueAsBytes(value);
            attachmentMap.put(name, serialized);
        } catch (JsonProcessingException ex) {
            addMessageAndWarn(context, String.format(
                    "Upload ID %s field %s could not be converted to JSON: %s", uploadId, name,
                    ex.getMessage()));
        }
    }
}
// For some reason, the iOS apps are sending timestamps in form "YYYY-MM-DD hh:mm:ss +ZZZZ", which is
// non-ISO-compliant and can't be parsed by JodaTime. We'll need to convert these to ISO format, generally
// "YYYY-MM-DDThh:mm:ss+ZZZZ".
// TODO: Remove this hack when it's no longer needed.
private static DateTime parseTimestampHelper(UploadValidationContext context, String uploadId, String filename,
String timestampStr) {
if (StringUtils.isBlank(timestampStr)) {
addMessageAndWarn(context, String.format("upload ID %s filename %s has blank time stamp", uploadId,
filename));
return null;
}
// Detect if this is iOS non-standard format by checking to see if the 10th char is a space.
if (timestampStr.charAt(10) == ' ') {
addMessageAndWarn(context, String.format("upload ID %s filename %s has non-standard timestamp format %s",
uploadId, filename, timestampStr));
// Attempt to convert this by replacing the 10th char with a T and then stripping out all spaces.
timestampStr = timestampStr.substring(0, 10) + 'T' + timestampStr.substring(11);
timestampStr = timestampStr.replaceAll("\\s+", "");
}
try {
return DateUtils.parseISODateTime(timestampStr);
} catch (RuntimeException ex) {
addMessageAndWarn(context, String.format("upload ID %s filename %s has invalid timestamp %s", uploadId,
filename, timestampStr));
return null;
}
}
/** Records a validation message on the context and mirrors it to the warn log. */
private static void addMessageAndWarn(UploadValidationContext context, String message) {
    context.addMessage(message);
    logger.warn(message);
}
}
|
app/org/sagebionetworks/bridge/upload/IosSchemaValidationHandler.java
|
package org.sagebionetworks.bridge.upload;
import javax.annotation.Nonnull;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableSet;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.sagebionetworks.bridge.dao.HealthDataDao;
import org.sagebionetworks.bridge.json.BridgeObjectMapper;
import org.sagebionetworks.bridge.json.DateUtils;
import org.sagebionetworks.bridge.models.healthdata.HealthDataRecordBuilder;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.upload.Upload;
import org.sagebionetworks.bridge.models.upload.UploadFieldDefinition;
import org.sagebionetworks.bridge.models.upload.UploadFieldType;
import org.sagebionetworks.bridge.models.upload.UploadSchema;
import org.sagebionetworks.bridge.services.UploadSchemaService;
// TODO: Currently, all apps are iOS-based. However, when we start having non-iOS apps, we'll need to restructure this
// so that it only runs in the iOS context.
@Component
public class IosSchemaValidationHandler implements UploadValidationHandler {
private static final Logger logger = LoggerFactory.getLogger(IosSchemaValidationHandler.class);
// Field types whose values are stored as attachments (uploaded separately) rather than inline.
private static final Set<UploadFieldType> ATTACHMENT_TYPE_SET = EnumSet.of(UploadFieldType.ATTACHMENT_BLOB,
        UploadFieldType.ATTACHMENT_CSV, UploadFieldType.ATTACHMENT_JSON_BLOB,
        UploadFieldType.ATTACHMENT_JSON_TABLE);
private static final String FILENAME_INFO_JSON = "info.json";
// Matches "-" followed by 8+ digits; used to strip embedded timestamps out of filenames.
private static final Pattern FILENAME_TIMESTAMP_PATTERN = Pattern.compile("-\\d{8,}");
// JSON keys used in info.json and in survey answer files.
private static final String KEY_ANSWERS = "answers";
private static final String KEY_FILENAME = "filename";
private static final String KEY_FILES = "files";
private static final String KEY_IDENTIFIER = "identifier";
private static final String KEY_ITEM = "item";
private static final String KEY_TASK_RUN = "taskRun";
private static final String KEY_TASK_RUN_ID = "taskRunId";
private static final String KEY_TIMESTAMP = "timestamp";
// Schema id used for all iOS survey uploads.
private static final String SCHEMA_IOS_SURVEY = "ios-survey";
// Note: some surveys have both questionType and questionTypeName. Some surveys only have questionType as a string.
// To maximize compatibility, we only check for questionType.
private static final Set<String> SURVEY_KEY_SET = ImmutableSet.of("endDate", "item", "questionType", "startDate");
private HealthDataDao healthDataDao;
private UploadSchemaService uploadSchemaService;
/** DAO used to obtain fresh HealthDataRecordBuilder instances. Spring-injected. */
@Autowired
public void setHealthDataDao(HealthDataDao healthDataDao) {
    this.healthDataDao = healthDataDao;
}
/** Service used to look up upload schemas for a study. Spring-injected. */
@Autowired
public void setUploadSchemaService(UploadSchemaService uploadSchemaService) {
    this.uploadSchemaService = uploadSchemaService;
}
// iOS data comes from a third party, and we have no control over the data format. So our data validation needs to
// be as flexible as possible. Which means our error handling strategy is to write a warning to the logs, and then
// attempt to recover. This will, however, cause cascading errors further down the validation chain.
@Override
public void handle(@Nonnull UploadValidationContext context)
        throws UploadValidationException {
    Map<String, JsonNode> jsonDataMap = context.getJsonDataMap();
    Map<String, byte[]> unzippedDataMap = context.getUnzippedDataMap();
    Upload upload = context.getUpload();
    String uploadId = upload.getUploadId();
    Study study = context.getStudy();
    String studyId = study.getIdentifier();
    // Add empty record builder and attachment map to the context. We'll fill these in as we need them.
    HealthDataRecordBuilder recordBuilder = healthDataDao.getRecordBuilder();
    context.setHealthDataRecordBuilder(recordBuilder);
    Map<String, byte[]> attachmentMap = new HashMap<>();
    context.setAttachmentsByFieldName(attachmentMap);
    // health data records fields
    recordBuilder.withHealthCode(upload.getHealthCode());
    recordBuilder.withStudyId(studyId);
    recordBuilder.withUploadDate(upload.getUploadDate());
    recordBuilder.withUploadId(uploadId);
    // create an empty object node in our record builder, which we'll fill in as we go
    ObjectNode dataMap = BridgeObjectMapper.get().createObjectNode();
    recordBuilder.withData(dataMap);
    // Use info.json verbatim as the metadata.
    JsonNode infoJson = getInfoJsonFile(context, uploadId, jsonDataMap);
    recordBuilder.withMetadata(infoJson);
    // extract other data from info.json
    JsonNode taskRunNode = infoJson.get(KEY_TASK_RUN);
    String taskRunId = taskRunNode != null ? taskRunNode.textValue() : null;
    JsonNode itemNode = infoJson.get(KEY_ITEM);
    if (itemNode == null) {
        // fall back to "identifier"
        itemNode = infoJson.get(KEY_IDENTIFIER);
    }
    String item = itemNode != null ? itemNode.textValue() : null;
    // validate and normalize filenames
    validateInfoJsonFileList(context, uploadId, jsonDataMap, unzippedDataMap, infoJson, recordBuilder);
    removeTimestampsFromFilenames(jsonDataMap);
    removeTimestampsFromFilenames(unzippedDataMap);
    // get schemas
    // TODO: cache this
    List<UploadSchema> schemaList = uploadSchemaService.getUploadSchemasForStudy(study);
    UploadSchema surveySchema = uploadSchemaService.getUploadSchema(study, SCHEMA_IOS_SURVEY);
    // Dispatch: non-JSON payloads are matched purely by info.json "item"; JSON payloads are
    // further classified as survey vs. schema-matched JSON data.
    if (!unzippedDataMap.isEmpty()) {
        handleNonJsonData(context, uploadId, jsonDataMap, unzippedDataMap, item, schemaList, recordBuilder,
                attachmentMap);
    } else {
        // This means our data is in JSON format, so we can look inside it to figure out what it is.
        if (isSurvey(jsonDataMap)) {
            handleSurvey(context, uploadId, jsonDataMap, item, taskRunId, surveySchema, recordBuilder,
                    attachmentMap, dataMap);
        } else {
            handleJsonData(context, uploadId, jsonDataMap, schemaList, recordBuilder, attachmentMap, dataMap);
        }
    }
}
/**
 * Returns the parsed info.json node from the JSON data map. If the upload did not include an
 * info.json file, logs a validation warning, substitutes an empty object node, and inserts
 * that node back into the map so downstream logic can assume info.json is always present.
 */
private static JsonNode getInfoJsonFile(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap) {
    JsonNode existingInfoJson = jsonDataMap.get(FILENAME_INFO_JSON);
    if (existingInfoJson != null) {
        return existingInfoJson;
    }

    // info.json is missing. Warn, then recover with an empty object node.
    addMessageAndWarn(context, String.format("upload ID %s does not contain info.json file", uploadId));
    JsonNode emptyInfoJson = BridgeObjectMapper.get().createObjectNode();

    // Re-insert into the map, since the rest of the handler assumes info.json is present.
    jsonDataMap.put(FILENAME_INFO_JSON, emptyInfoJson);
    return emptyInfoJson;
}
/**
 * Validates the file list declared in info.json against the files actually present in the
 * upload, logging (but recovering from) any discrepancies. Also computes the record's
 * createdOn timestamp as the latest per-file timestamp, falling back to the current time if
 * no file carries a parseable timestamp.
 */
private static void validateInfoJsonFileList(UploadValidationContext context, String uploadId,
Map<String, JsonNode> jsonDataMap, Map<String, byte[]> unzippedDataMap, JsonNode infoJson,
HealthDataRecordBuilder recordBuilder) {
// Make sure all files specified by info.json are accounted for.
// Because ParseJsonHandler moves files from unzippedDataMap to jsonDataMap, there is no overlap between the
// two maps.
Set<String> fileNameSet = new HashSet<>();
fileNameSet.addAll(jsonDataMap.keySet());
fileNameSet.addAll(unzippedDataMap.keySet());
// fileList.size() should be exactly 1 less than fileNameSet.size(), because fileList.size() doesn't include
// info.json.
JsonNode fileList = infoJson.get(KEY_FILES);
if (fileList == null) {
// Recover by replacing this with an empty list
addMessageAndWarn(context,
String.format("upload ID %s info.json does not contain file list", uploadId));
fileList = BridgeObjectMapper.get().createArrayNode();
} else if (fileList.size() == 0) {
addMessageAndWarn(context, String.format("upload ID %s info.json contains empty file list", uploadId));
} else if (fileList.size() != fileNameSet.size() - 1) {
addMessageAndWarn(context, String.format("upload ID %s info.json reports %d files, but we found %d files",
uploadId, fileList.size(), fileNameSet.size() - 1));
}
DateTime createdOn = null;
Map<String, JsonNode> infoJsonFilesByName = new HashMap<>();
for (JsonNode oneFileJson : fileList) {
// validate filename
JsonNode filenameNode = oneFileJson.get(KEY_FILENAME);
String filename = null;
if (filenameNode == null) {
addMessageAndWarn(context, String.format("upload ID %s info.json contains file with no name",
uploadId));
} else {
filename = filenameNode.textValue();
if (!fileNameSet.contains(filename)) {
addMessageAndWarn(context, String.format(
"upload ID %s info.json contains filename %s, not found in the archive", uploadId,
filename));
}
infoJsonFilesByName.put(filename, oneFileJson);
}
// Calculate createdOn timestamp. Each file in the file list has its own timestamp. Canonical createdOn is
// the latest of these timestamps.
JsonNode timestampNode = oneFileJson.get(KEY_TIMESTAMP);
if (timestampNode == null) {
addMessageAndWarn(context, String.format("upload ID %s filename %s has no timestamp", uploadId,
filename));
} else {
// parseTimestampHelper() logs its own warning and returns null for blank or malformed
// timestamps. Guard against null here: previously, a null timestamp following a valid
// one would throw a NullPointerException on timestamp.isAfter().
DateTime timestamp = parseTimestampHelper(context, uploadId, filename, timestampNode.textValue());
if (timestamp != null && (createdOn == null || timestamp.isAfter(createdOn))) {
createdOn = timestamp;
}
}
}
// sanity check filenames with the info.json file list
for (String oneFilename : fileNameSet) {
if (!oneFilename.equals(FILENAME_INFO_JSON) && !infoJsonFilesByName.containsKey(oneFilename)) {
addMessageAndWarn(context, String.format(
"upload ID %s contains filename %s not found in info.json", uploadId, oneFilename));
}
}
if (createdOn == null) {
// Recover by using current time.
addMessageAndWarn(context, String.format("upload ID %s has no timestamps, using current time", uploadId));
createdOn = DateUtils.getCurrentDateTime();
}
recordBuilder.withCreatedOn(createdOn.getMillis());
}
/**
 * Strips timestamps out of the keys of the given file map, re-keying entries in place.
 * Anything matching FILENAME_TIMESTAMP_PATTERN (a run of 8+ digits, plus a leading dash) is
 * removed, because timestamped filenames break parsing that uses filenames as key prefixes.
 * Filenames are assumed to remain unique after stripping.
 */
private static <T> void removeTimestampsFromFilenames(Map<String, T> fileMap) {
    // Snapshot the key set so we can mutate the map while iterating without triggering a
    // ConcurrentModificationException.
    Set<String> originalFilenames = new HashSet<>(fileMap.keySet());
    for (String filename : originalFilenames) {
        Matcher timestampMatcher = FILENAME_TIMESTAMP_PATTERN.matcher(filename);
        if (!timestampMatcher.find()) {
            continue;
        }
        String strippedFilename = timestampMatcher.replaceAll("");
        fileMap.put(strippedFilename, fileMap.remove(filename));
    }
}
/**
 * Handles uploads containing non-JSON data. We cannot introspect opaque binary data, so the
 * schema is chosen by matching info.json's "item" field against schema names (latest revision
 * wins). Each file becomes an attachment keyed by a schema field of the same name.
 *
 * @throws UploadValidationException if "item" is blank, no schema matches it, or the matched
 *     schema has no fields
 */
private static void handleNonJsonData(UploadValidationContext context, String uploadId,
Map<String, JsonNode> jsonDataMap, Map<String, byte[]> unzippedDataMap, String infoJsonItem,
List<UploadSchema> schemaList, HealthDataRecordBuilder recordBuilder, Map<String, byte[]> attachmentMap)
throws UploadValidationException {
// Attempting to parse into the non-JSON data is an exercise in madness. Our best strategy here is to
// match the "item" field in info.json with one of the schema names, and pick the one with the latest
// revision.
if (StringUtils.isBlank(infoJsonItem)) {
// No "item" field means we have no way of identifying this. Game over.
throw new UploadValidationException(
"info.json in non-JSON upload has blank \"item\" field to identify the schema with.");
}
// Try to find the schema with a name matching "item", preferring the highest revision.
UploadSchema latestSchema = null;
for (UploadSchema oneSchema : schemaList) {
if (oneSchema.getName().equals(infoJsonItem)) {
if (latestSchema == null || oneSchema.getRevision() > latestSchema.getRevision()) {
latestSchema = oneSchema;
}
}
}
if (latestSchema == null) {
// No schema, no health data record. Game over.
throw new UploadValidationException(String.format("No schema found for item %s", infoJsonItem));
}
// We found the schema.
String schemaId = latestSchema.getSchemaId();
int schemaRev = latestSchema.getRevision();
recordBuilder.withSchemaId(schemaId);
recordBuilder.withSchemaRevision(schemaRev);
// Schema should have a field that's in ATTACHMENT_TYPE_SET, to store the attachment ref in.
List<UploadFieldDefinition> fieldDefList = latestSchema.getFieldDefinitions();
if (fieldDefList.isEmpty()) {
// No fields at all? Game over.
throw new UploadValidationException(String.format("Identified schema ID %s rev %d has no fields",
schemaId, schemaRev));
}
// Find fields of type in ATTACHMENT_TYPE_SET to store the attachment ref in. The field name is the same as the
// filename.
Set<String> fieldNameSet = new HashSet<>();
for (UploadFieldDefinition oneFieldDef : fieldDefList) {
String fieldName = oneFieldDef.getName();
fieldNameSet.add(fieldName);
if (ATTACHMENT_TYPE_SET.contains(oneFieldDef.getType())) {
byte[] data = unzippedDataMap.get(fieldName);
if (data != null) {
// Write this to the attachment map. UploadArtifactsHandler will take care of the rest.
attachmentMap.put(fieldName, data);
} else {
// No raw data for this field; the matching file may have been parsed as JSON.
JsonNode jsonData = jsonDataMap.get(fieldName);
if (jsonData != null) {
// Convert to raw bytes, then add to attachment map.
try {
attachmentMap.put(fieldName, BridgeObjectMapper.get().writeValueAsBytes(jsonData));
} catch (JsonProcessingException ex) {
addMessageAndWarn(context, String.format(
"Upload ID %s attachment field %s could not be converted to JSON: %s", uploadId,
fieldName, ex.getMessage()));
}
} else if (oneFieldDef.isRequired()) {
// Required field with no backing file: warn but keep processing (non-fatal).
addMessageAndWarn(context, String.format(
"Upload ID %s with schema ID %s has required field %s with no corresponding file",
uploadId,
schemaId, fieldName));
}
}
} else {
// Non-JSON uploads should only use attachment-typed fields; warn on anything else.
addMessageAndWarn(context, String.format("Upload ID %s with schema ID %s has non-attachment field %s",
uploadId, schemaId, fieldName));
}
}
// validate file names against field names: every file should correspond to a schema field
for (String oneFilename : unzippedDataMap.keySet()) {
if (!fieldNameSet.contains(oneFilename)) {
addMessageAndWarn(context, String.format(
"Upload ID %s with schema ID %s has file %s with no corresponding field", uploadId, schemaId,
oneFilename));
}
}
for (String oneJsonFilename : jsonDataMap.keySet()) {
if (oneJsonFilename.equals(FILENAME_INFO_JSON)) {
// skip info.json; it is metadata, not a data field
continue;
}
if (!fieldNameSet.contains(oneJsonFilename)) {
addMessageAndWarn(context, String.format(
"Upload ID %s with schema ID %s has JSON file %s with no corresponding field", uploadId,
schemaId, oneJsonFilename));
}
}
}
/**
 * Determines whether this upload contains survey data. A JSON file looks like a survey answer
 * if it contains every key in SURVEY_KEY_SET (survey files may carry additional fields beyond
 * that set, since every question type has its own special fields).
 *
 * <p>We check every JSON file (other than info.json), not just the first one, because some
 * activities mix survey files with non-survey files. A mixed upload is treated as non-survey
 * data so it goes through normal schema matching.</p>
 *
 * @return true if there is at least one data file and every data file looks like a survey
 */
private static boolean isSurvey(Map<String, JsonNode> jsonDataMap) {
    boolean hasDataFile = false;
    for (Map.Entry<String, JsonNode> oneJsonFile : jsonDataMap.entrySet()) {
        if (oneJsonFile.getKey().equals(FILENAME_INFO_JSON)) {
            // info.json is upload metadata, not survey data. Skip it.
            continue;
        }
        hasDataFile = true;

        JsonNode oneJsonFileNode = oneJsonFile.getValue();
        Set<String> fieldNameSet = ImmutableSet.copyOf(oneJsonFileNode.fieldNames());
        if (!fieldNameSet.containsAll(SURVEY_KEY_SET)) {
            // At least one file is not a survey answer, so treat the whole upload as
            // non-survey data.
            return false;
        }
    }
    // If there are no JSON entries other than info.json, it's definitely not a survey.
    return hasDataFile;
}
/**
 * Handles survey uploads. The 3rd party iOS apps don't tag surveys or questions with guids
 * (and some surveys aren't in the Surveys table yet), so we can't store answers in the Survey
 * Responses table. Instead, every answer file is gathered into one big JSON array and stored
 * as an attachment, and the record is tagged with the common iOS survey schema.
 */
private static void handleSurvey(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, String infoJsonItem, String taskRunId, UploadSchema surveySchema,
        HealthDataRecordBuilder recordBuilder, Map<String, byte[]> attachmentMap, ObjectNode dataMap) {
    // Collect every answer file (everything except info.json) into a single array node.
    ArrayNode allAnswers = BridgeObjectMapper.get().createArrayNode();
    for (Map.Entry<String, JsonNode> answerFileEntry : jsonDataMap.entrySet()) {
        boolean isInfoJson = answerFileEntry.getKey().equals(FILENAME_INFO_JSON);
        if (!isInfoJson) {
            allAnswers.add(answerFileEntry.getValue());
        }
    }

    // The combined answer array is stored as an attachment, not inline.
    try {
        byte[] answerBytes = BridgeObjectMapper.get().writeValueAsBytes(allAnswers);
        attachmentMap.put(KEY_ANSWERS, answerBytes);
    } catch (JsonProcessingException ex) {
        addMessageAndWarn(context, String.format(
                "Upload ID %s could not convert survey answers to JSON: %s", uploadId, ex.getMessage()));
    }

    // Record the activity item and task run ID inline in the data map.
    dataMap.put(KEY_ITEM, infoJsonItem);
    dataMap.put(KEY_TASK_RUN_ID, taskRunId);

    // All surveys share the common iOS survey schema.
    recordBuilder.withSchemaId(surveySchema.getSchemaId());
    recordBuilder.withSchemaRevision(surveySchema.getRevision());
}
/**
 * Handles uploads whose data is entirely JSON. Flattens all JSON files (other than info.json)
 * into a single field map, matches that map against the study's schemas, then copies the
 * fields into the health data record.
 *
 * @throws UploadValidationException if no schema matches the flattened field map
 */
private static void handleJsonData(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> jsonDataMap, List<UploadSchema> schemaList, HealthDataRecordBuilder recordBuilder,
        Map<String, byte[]> attachmentMap, ObjectNode dataMap) throws UploadValidationException {
    // Health Data Records stores a single map, so merge every JSON file (except info.json)
    // into one flattened field map.
    Map<String, JsonNode> dataFieldMap = flattenJsonDataMap(jsonDataMap);

    // Figure out which schema this data corresponds to.
    UploadSchema matchedSchema = selectSchema(context, uploadId, dataFieldMap, schemaList);
    if (matchedSchema == null) {
        // No schema, no health data record. Game over.
        Set<String> keySet = dataFieldMap.keySet();
        throw new UploadValidationException(String.format("No schema found for keys (%s)",
                Joiner.on(", ").join(keySet)));
    }

    recordBuilder.withSchemaId(matchedSchema.getSchemaId());
    recordBuilder.withSchemaRevision(matchedSchema.getRevision());

    // Copy the matched fields into the health data record (or the attachment map).
    copyJsonDataToHealthData(context, uploadId, dataFieldMap, matchedSchema, dataMap, attachmentMap);
}
/**
 * Merges every JSON file in the upload (except info.json) into a single flat field map. Each
 * entry is keyed by "[filename].[fieldName]", so identically-named fields in different files
 * don't collide.
 */
private static Map<String, JsonNode> flattenJsonDataMap(Map<String, JsonNode> jsonDataMap) {
Map<String, JsonNode> dataFieldMap = new HashMap<>();
for (Map.Entry<String, JsonNode> oneJsonFile : jsonDataMap.entrySet()) {
String filename = oneJsonFile.getKey();
if (filename.equals(FILENAME_INFO_JSON)) {
// info.json is upload metadata, not health data. Skip it.
continue;
}
JsonNode oneJsonFileNode = oneJsonFile.getValue();
Iterator<String> fieldNameIter = oneJsonFileNode.fieldNames();
while (fieldNameIter.hasNext()) {
// Prefix the field name with its file name, so same-named fields in different files
// are disambiguated.
String oneFieldName = fieldNameIter.next();
dataFieldMap.put(filename + "." + oneFieldName, oneJsonFileNode.get(oneFieldName));
}
}
return dataFieldMap;
}
/**
 * Cross-references the flattened data field map against the study's schemas and returns the
 * matching schema, or null if none matches. A schema matches when every one of its field
 * definitions validates against the data AND the data has no keys outside the schema. When
 * multiple schemas match, the one with the highest revision wins.
 *
 * <p>NOTE(review): dataFieldMap keys are "[filename].[fieldName]" (see flattenJsonDataMap),
 * so this presumably requires schema field names to carry the same filename prefix — confirm
 * against the schema definitions.</p>
 */
private static UploadSchema selectSchema(UploadValidationContext context, String uploadId,
Map<String, JsonNode> dataFieldMap, List<UploadSchema> schemaList) {
// cross-ref our data map with our schema list to see which schema matches
UploadSchema latestSchema = null;
for (UploadSchema oneSchema : schemaList) {
Set<String> schemaKeySet = new HashSet<>();
// strategy: assume the schema matches, unless we find a field that doesn't match
boolean isMatch = true;
for (UploadFieldDefinition oneFieldDef : oneSchema.getFieldDefinitions()) {
String fieldName = oneFieldDef.getName();
schemaKeySet.add(fieldName);
JsonNode fieldValue = dataFieldMap.get(fieldName);
isMatch = validateField(context, uploadId, fieldValue, oneFieldDef);
if (!isMatch) {
// we already know it's not a match, so we can short-circuit
break;
}
}
if (isMatch && !schemaKeySet.containsAll(dataFieldMap.keySet())) {
// There are keys in the JSON that aren't present in the schema. This may refer to a different
// revision of the schema, or to a different schema entirely. Mark it as not a match.
// (Only check if we haven't already flagged it as non-match. Otherwise, this check may be
// meaningless.)
isMatch = false;
}
if (isMatch) {
// If we have more than one match, choose the one with the highest revision, as that's the one
// that's newest. (This assumes that while schema revisions may be the same, entirely different
// schemas will be different.)
if (latestSchema == null || oneSchema.getRevision() > latestSchema.getRevision()) {
latestSchema = oneSchema;
}
}
}
return latestSchema;
}
/**
 * Checks whether a single JSON value is compatible with the given schema field definition.
 * A missing value is acceptable only when the field is optional. Used during schema selection:
 * a mismatch disqualifies the candidate schema.
 *
 * @return true if the value satisfies the field definition, false otherwise
 */
private static boolean validateField(UploadValidationContext context, String uploadId, JsonNode fieldValue,
        UploadFieldDefinition fieldDef) {
    if (fieldValue == null) {
        // A missing value only matches if the field is not required.
        return !fieldDef.isRequired();
    }

    switch (fieldDef.getType()) {
        case ATTACHMENT_BLOB:
        case ATTACHMENT_CSV:
            // These types carry non-JSON data, and JSON and non-JSON data are never mixed,
            // so a JSON value can't match.
            return false;
        case ATTACHMENT_JSON_BLOB:
        case INLINE_JSON_BLOB:
            // Any JSON value qualifies as a JSON blob; no further validation needed.
            return true;
        case ATTACHMENT_JSON_TABLE: {
            // A table must be an array whose first element is an object node (a "row").
            if (!fieldValue.isArray()) {
                return false;
            }
            JsonNode firstRow = fieldValue.get(0);
            return firstRow != null && firstRow.isObject();
        }
        case BOOLEAN:
            return fieldValue.isBoolean();
        case CALENDAR_DATE: {
            // Must be a string parseable by Joda LocalDate.
            if (!fieldValue.isTextual()) {
                return false;
            }
            try {
                // parseCalendarDate() delegates to Joda parseLocalDate(), which is documented
                // as never returning null, so success here means the value is valid.
                DateUtils.parseCalendarDate(fieldValue.textValue());
                return true;
            } catch (RuntimeException ex) {
                return false;
            }
        }
        case FLOAT:
            // Covers floats, doubles, and decimals.
            return fieldValue.isFloatingPointNumber();
        case INT:
            // Covers ints, longs, and big ints.
            return fieldValue.isIntegralNumber();
        case STRING:
            return fieldValue.isTextual();
        case TIMESTAMP: {
            // Either an ISO-format string or a long in epoch milliseconds.
            if (fieldValue.isTextual()) {
                try {
                    return parseTimestampHelper(context, uploadId, null, fieldValue.textValue()) != null;
                } catch (RuntimeException ex) {
                    return false;
                }
            }
            if (fieldValue.isIntegralNumber()) {
                try {
                    new DateTime(fieldValue.longValue());
                    return true;
                } catch (RuntimeException ex) {
                    return false;
                }
            }
            return false;
        }
        default:
            // Defensive: if a new UploadFieldType is ever added without updating this switch,
            // treat it as a non-match rather than crashing.
            return false;
    }
}
/**
 * Copies each schema-defined field from the flattened field map into the health data record.
 * Attachment-typed fields are serialized to raw bytes and staged in the attachment map (the
 * UploadArtifactsHandler uploads them later); all other fields go inline into the data map.
 */
private static void copyJsonDataToHealthData(UploadValidationContext context, String uploadId,
        Map<String, JsonNode> dataFieldMap, UploadSchema schema, ObjectNode dataMap,
        Map<String, byte[]> attachmentMap) {
    for (UploadFieldDefinition fieldDef : schema.getFieldDefinitions()) {
        String fieldName = fieldDef.getName();
        JsonNode fieldValue = dataFieldMap.get(fieldName);

        if (!ATTACHMENT_TYPE_SET.contains(fieldDef.getType())) {
            // Inline field: copy the JSON value straight into the record's data map.
            dataMap.set(fieldName, fieldValue);
            continue;
        }

        // Attachment field: serialize to bytes and stage for upload.
        try {
            byte[] attachmentBytes = BridgeObjectMapper.get().writeValueAsBytes(fieldValue);
            attachmentMap.put(fieldName, attachmentBytes);
        } catch (JsonProcessingException ex) {
            addMessageAndWarn(context, String.format(
                    "Upload ID %s field %s could not be converted to JSON: %s", uploadId, fieldName,
                    ex.getMessage()));
        }
    }
}
// For some reason, the iOS apps are sending timestamps in form "YYYY-MM-DD hh:mm:ss +ZZZZ", which is
// non-ISO-compliant and can't be parsed by JodaTime. We'll need to convert these to ISO format, generally
// "YYYY-MM-DDThh:mm:ss+ZZZZ".
// TODO: Remove this hack when it's no longer needed.
/**
 * Parses a timestamp string into a Joda DateTime, tolerating the non-ISO "date space time"
 * format sent by some iOS apps. Logs a validation warning and returns null for blank or
 * unparseable timestamps.
 *
 * @param filename the file the timestamp came from, used in warning messages; may be null
 * @return the parsed DateTime, or null if the timestamp could not be parsed
 */
private static DateTime parseTimestampHelper(UploadValidationContext context, String uploadId, String filename,
        String timestampStr) {
    if (StringUtils.isBlank(timestampStr)) {
        addMessageAndWarn(context, String.format("upload ID %s filename %s has blank time stamp", uploadId,
                filename));
        return null;
    }

    // Detect the iOS non-standard format by checking whether the 11th char (index 10) is a space.
    // Guard the length first: a non-blank string shorter than 11 chars (e.g. "2015") would
    // otherwise throw StringIndexOutOfBoundsException instead of being reported as invalid.
    if (timestampStr.length() > 10 && timestampStr.charAt(10) == ' ') {
        addMessageAndWarn(context, String.format("upload ID %s filename %s has non-standard timestamp format %s",
                uploadId, filename, timestampStr));

        // Convert by replacing the space separator with a 'T', then stripping all remaining spaces.
        timestampStr = timestampStr.substring(0, 10) + 'T' + timestampStr.substring(11);
        timestampStr = timestampStr.replaceAll("\\s+", "");
    }

    try {
        return DateUtils.parseISODateTime(timestampStr);
    } catch (RuntimeException ex) {
        addMessageAndWarn(context, String.format("upload ID %s filename %s has invalid timestamp %s", uploadId,
                filename, timestampStr));
        return null;
    }
}
/**
 * Records a validation message on the upload context (so it's surfaced in the upload's
 * validation status) and also logs it as a warning.
 */
private static void addMessageAndWarn(UploadValidationContext context, String message) {
context.addMessage(message);
logger.warn(message);
}
}
|
Upload Validation: check all files in isSurvey(), since some activities mix surveys with non-surveys
|
app/org/sagebionetworks/bridge/upload/IosSchemaValidationHandler.java
|
Upload Validation: check all files in isSurvey(), since some activities mix surveys with non-surveys
|
|
Java
|
apache-2.0
|
e7dea00f03fd593d93beaa59c328a320d0c8dc15
| 0
|
renatocf/MAC0242-PROJECT,renatocf/MAC0242-PROJECT,renatocf/MAC0242-PROJECT
|
/**********************************************************************/
/* Copyright 2013 KRV */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); */
/* you may not use this file except in compliance with the License. */
/* You may obtain a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, */
/* software distributed under the License is distributed on an */
/* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, */
/* either express or implied. */
/* See the License for the specific language governing permissions */
/* and limitations under the License. */
/**********************************************************************/
package robot;
// Default libraries
import java.util.Vector;
import java.util.HashMap;
//Libraries
import robot.*;
import stackable.*;
import exception.*;
import parameters.*;
/**
 * <b>Ctrl</b><br>
 * Given an assembly function name and its
 * argument (if required), executes it for
 * the Virtual Machine specified.
 *
 * @author Karina Awoki
 * @author Renato Cordeiro Ferreira
 * @author Vinícius Silva
 * @see Ctrl
 */
final public class Ctrl
{
// Utility class: no instances of this class allowed
private Ctrl() {}
/**
 * Selector for the function to be called. Dispatches the assembly mnemonic to the
 * corresponding opcode implementation, passing the argument only to opcodes that take one.
 *
 * @param rvm Virtual Machine
 * @param met String with the name of the function (assembly mnemonic)
 * @param arg Argument of the assembly method (may be null for zero-argument opcodes)
 *
 * @throws SegmentationFaultException
 * @throws UndefinedFunctionException
 * @throws InvalidOperationException if the mnemonic is not a recognized opcode
 * @throws NotInitializedException
 * @throws StackUnderflowException
 * @throws NoLabelFoundException
 * @throws OutOfBoundsException
 * @throws WrongTypeException
 */
public static void ctrl(RVM rvm, String met, Stackable arg)
throws SegmentationFaultException,
UndefinedFunctionException,
InvalidOperationException,
NotInitializedException,
StackUnderflowException,
OutOfBoundsException,
WrongTypeException
{
// Debug trace: mnemonic, optional argument, and a separator when the program ends
Debugger.print("[CTRL] ", met);
if(arg != null) Debugger.print(" ", arg.toString());
Debugger.say();
if(met.equals("END")) Debugger.say("===========");
switch(met)
{
// IO functions
case "PRN" : IO.PRN (rvm); break;
// Stack functions
case "POP" : Stk.POP (rvm); break;
case "PUSH": Stk.PUSH (rvm, arg); break;
case "DUP" : Stk.DUP (rvm); break;
case "SWAP": Stk.SWAP (rvm); break;
// Arithmetic functions
case "ADD" : Arit.ADD (rvm); break;
case "SUB" : Arit.SUB (rvm); break;
case "MUL" : Arit.MUL (rvm); break;
case "DIV" : Arit.DIV (rvm); break;
case "MOD" : Arit.MOD (rvm); break;
// Memory functions
case "STO" : Mem.STO (rvm, arg); break;
case "RCL" : Mem.RCL (rvm, arg); break;
// Tests functions
case "EQ" : Tests.EQ (rvm); break;
case "GT" : Tests.GT (rvm); break;
case "GE" : Tests.GE (rvm); break;
case "LT" : Tests.LT (rvm); break;
case "LE" : Tests.LE (rvm); break;
case "NE" : Tests.NE (rvm); break;
// Jumps functions
case "JMP" : Jumps.JMP (rvm, arg); break;
case "JIT" : Jumps.JIT (rvm, arg); break;
case "JIF" : Jumps.JIF (rvm, arg); break;
// Program workflow
case "NOP" : Prog.NOP (rvm); break;
case "END" : Prog.END (rvm); break;
// Functions
case "CALL": Func.CALL (rvm, arg); break;
case "RET" : Func.RET (rvm); break;
// System calls
case "MOVE": Syst.MOVE (rvm); break;
case "DRAG": Syst.DRAG (rvm); break;
case "DROP": Syst.DROP (rvm); break;
case "HIT" : Syst.HIT (rvm); break;
case "LOOK": Syst.LOOK (rvm); break;
case "SEE" : Syst.SEE (rvm); break;
case "ASK" : Syst.ASK (rvm); break;
// Item verification
case "ITEM": Check.ITEM (rvm); break;
case "SEEK": Check.SEEK (rvm); break;
// Local variables
case "ALOC": Var.ALOC (rvm, arg); break;
case "FREE": Var.FREE (rvm, arg); break;
case "GET" : Var.GET (rvm, arg); break;
case "SET" : Var.SET (rvm, arg); break;
// Network interface
case "READ": Net.READ (rvm); break;
case "WRT" : Net.WRT (rvm); break;
// Base case: unrecognized mnemonic
default: throw new InvalidOperationException(met);
}
}
}
|
src/robot/Ctrl.java
|
/**********************************************************************/
/* Copyright 2013 KRV */
/* */
/* Licensed under the Apache License, Version 2.0 (the "License"); */
/* you may not use this file except in compliance with the License. */
/* You may obtain a copy of the License at */
/* */
/* http://www.apache.org/licenses/LICENSE-2.0 */
/* */
/* Unless required by applicable law or agreed to in writing, */
/* software distributed under the License is distributed on an */
/* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, */
/* either express or implied. */
/* See the License for the specific language governing permissions */
/* and limitations under the License. */
/**********************************************************************/
package robot;
// Default libraries
import java.util.Vector;
import java.util.HashMap;
//Libraries
import robot.*;
import stackable.*;
import exception.*;
import parameters.*;
/**
 * <b>Ctrl</b><br>
 * Given an assembly function name and its
 * argument (if required), executes it for
 * the Virtual Machine specified.
 *
 * @author Karina Awoki
 * @author Renato Cordeiro Ferreira
 * @author Vinícius Silva
 * @see Ctrl
 */
final public class Ctrl
{
// Utility class: no instances of this class allowed
private Ctrl() {}
/**
 * Selector for the function to be called. Dispatches the assembly mnemonic to the
 * corresponding opcode implementation, passing the argument only to opcodes that take one.
 *
 * @param rvm Virtual Machine
 * @param met String with the name of the function (assembly mnemonic)
 * @param arg Argument of the assembly method (may be null for zero-argument opcodes)
 *
 * @throws SegmentationFaultException
 * @throws UndefinedFunctionException
 * @throws InvalidOperationException if the mnemonic is not a recognized opcode
 * @throws NotInitializedException
 * @throws StackUnderflowException
 * @throws NoLabelFoundException
 * @throws OutOfBoundsException
 * @throws WrongTypeException
 */
public static void ctrl(RVM rvm, String met, Stackable arg)
throws SegmentationFaultException,
UndefinedFunctionException,
InvalidOperationException,
NotInitializedException,
StackUnderflowException,
OutOfBoundsException,
WrongTypeException
{
// Debug trace: mnemonic, optional argument, and a separator when the program ends
Debugger.print("[CTRL] ", met);
if(arg != null) Debugger.print(" ", arg.toString());
Debugger.say();
if(met.equals("END")) Debugger.say("===========");
switch(met)
{
// IO functions
case "PRN" : IO.PRN (rvm); break;
// Stack functions
case "POP" : Stk.POP (rvm); break;
case "PUSH": Stk.PUSH (rvm, arg); break;
case "DUP" : Stk.DUP (rvm); break;
case "SWAP": Stk.SWAP (rvm); break;
// Arithmetic functions
case "ADD" : Arit.ADD (rvm); break;
case "SUB" : Arit.SUB (rvm); break;
case "MUL" : Arit.MUL (rvm); break;
case "DIV" : Arit.DIV (rvm); break;
case "MOD" : Arit.MOD (rvm); break;
// Memory functions
case "STO" : Mem.STO (rvm, arg); break;
case "RCL" : Mem.RCL (rvm, arg); break;
// Tests functions
case "EQ" : Tests.EQ (rvm); break;
case "GT" : Tests.GT (rvm); break;
case "GE" : Tests.GE (rvm); break;
case "LT" : Tests.LT (rvm); break;
case "LE" : Tests.LE (rvm); break;
case "NE" : Tests.NE (rvm); break;
// Jumps functions
case "JMP" : Jumps.JMP (rvm, arg); break;
case "JIT" : Jumps.JIT (rvm, arg); break;
case "JIF" : Jumps.JIF (rvm, arg); break;
// Program workflow
case "NOP" : Prog.NOP (rvm); break;
case "END" : Prog.END (rvm); break;
// Functions
case "CALL": Func.CALL (rvm, arg); break;
case "RET" : Func.RET (rvm); break;
// System calls
case "MOVE": Syst.MOVE (rvm); break;
case "DRAG": Syst.DRAG (rvm); break;
case "DROP": Syst.DROP (rvm); break;
case "HIT" : Syst.HIT (rvm); break;
case "LOOK": Syst.LOOK (rvm); break;
case "SEE" : Syst.SEE (rvm); break;
case "ASK" : Syst.ASK (rvm); break;
// Item verification
case "ITEM": Check.ITEM (rvm); break;
case "SEEK": Check.SEEK (rvm); break;
// Local variables
case "ALOC": Var.ALOC (rvm, arg); break;
case "FREE": Var.FREE (rvm, arg); break;
case "GET" : Var.GET (rvm, arg); break;
case "SET" : Var.SET (rvm, arg); break;
// Base case: unrecognized mnemonic
default: throw new InvalidOperationException(met);
}
}
}
|
Adding READ and WRT commands in the controller
|
src/robot/Ctrl.java
|
Adding READ and WRT commands in the controller
|
|
Java
|
apache-2.0
|
cf1b7f279a7141295080dcca2af9ad36ae832f47
| 0
|
jaredrummler/AndroidDeviceNames,jaredrummler/AndroidDeviceNames
|
/*
* Copyright (C) 2015. Jared Rummler <me@jaredrummler.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.jaredrummler.android.device;
import android.Manifest;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* <p>On many Android devices, the best way to display the device name to the user is by getting
* the
* value of {@link android.os.Build#MODEL}. Unfortunately, on many popular devices, this value is
* not consumer friendly. For example, on the Samsung Galaxy S6 the value of {@link Build#MODEL}
* could be "SM-G920F", "SM-G920I", "SM-G920W8", etc.</p>
*
* <p>To get the market (consumer friendly) name of a device you can use one (or both) of the
* following examples:</p>
*
* <b>Example 1:</b>
*
* <br>
* <p>{@code String deviceName = DeviceName.getDeviceName();}</p>
* <b>Example 2:</b>
* <br>
* <pre>
* <code>
* DeviceName.with(context).request(new DeviceName.Callback() {
* {@literal @}Override
* public void onFinished(DeviceName.DeviceInfo info, Exception error) {
* String deviceName;
* if (error != null) {
* deviceName = info.getName();
* } else {
* deviceName = DeviceName.getDeviceName();
* }
* }
* });
* </code>
* </pre>
* <p>
* <b>Example 1:</b> contains over 600 popular Android devices and can be run on the UI thread. If
* the current device is not in the list then {@link Build#MODEL} will be returned as a fallback.
* </p>
* <p>
* <b>Example 2:</b> loads JSON from a generated list of device names based on Google's maintained
* list and contains around 10,000 devices. This needs a network connection and is run in a
* background thread.
* </p>
*
* @author Jared Rummler
*/
public class DeviceName {
// JSON which is derived from Google's PDF document which contains all devices on Google Play
// To get the URL to the JSON file which contains information about the device name:
// String url = String.format(DEVICE_JSON_URL, Build.DEVICE);
private static final String DEVICE_JSON_URL =
"https://raw.githubusercontent.com/jaredrummler/AndroidDeviceNames/master/json/codenames/%s.json";
// Preference filename for storing device info so we don't need to download it again.
private static final String SHARED_PREF_NAME = "device_names";
/**
 * Create a new request to get information about a device.
 *
 * @param context
 *     the application context
 * @return a new Request instance.
 */
public static Request with(Context context) {
    // Hold the application context (not an Activity) to avoid leaking a shorter-lived context.
    Context appContext = context.getApplicationContext();
    return new Request(appContext);
}
/**
 * Get the consumer friendly name of the device.
 *
 * @return the market name of the current device.
 * @see #getDeviceName(String, String)
 */
public static String getDeviceName() {
    String manufacturer = Build.MANUFACTURER;
    String model = Build.MODEL;

    // If the model already begins with the manufacturer name, avoid repeating it;
    // otherwise prefix the capitalized manufacturer to the model.
    String fallback = model.startsWith(manufacturer)
            ? capitalize(model)
            : capitalize(manufacturer) + " " + model;

    return getDeviceName(Build.DEVICE, fallback);
}
/**
* Get the consumer friendly name of a device.
*
* @param codename
* the value of the system property "ro.product.device" ({@link Build#DEVICE}).
* @param fallback
* the fallback name if the device is unknown. Usually the value of the system property
* "ro.product.model" ({@link Build#MODEL})
* @return the market name of a device or {@code fallback} if the device is unknown.
*/
public static String getDeviceName(String codename, String fallback) {
switch (codename) {
case "quark":
return "DROID Turbo";
case "K01N_2":
case "K01N_1":
case "K012":
case "K00Z":
return "Fonepad 7";
case "a3ltedd":
case "a3ulte":
case "a3ltezh":
case "a3ltechn":
case "a3lte":
case "a33g":
case "a3ltectc":
case "a3lteslk":
case "a3ltezt":
return "Galaxy A3";
case "a5ltectc":
case "a5ltezh":
case "a5ulteskt":
case "a53g":
case "a5ltezt":
case "a5lte":
case "a5ltechn":
case "a5ulte":
case "a5ultektt":
case "a5ultelgt":
return "Galaxy A5";
case "a8elte":
case "a8elteskt":
case "a8ltechn":
return "Galaxy A8";
case "vivaltods5m":
return "Galaxy Ace 4";
case "GT-S6802B":
case "GT-S6802":
case "SCH-i589":
case "SCH-i579":
case "GT-S6352":
case "SCH-I589":
case "SCH-I579":
return "Galaxy Ace Duos";
case "GT-S7500L":
case "GT-S7500":
case "GT-S7500T":
case "GT-S7508":
case "GT-S7500W":
return "Galaxy Ace Plus";
case "heatqlte":
case "heat3gtfnvzw":
case "heatnfc3g":
return "Galaxy Ace Style";
case "vivaltolte":
case "vivalto5mve3g":
case "vivaltonfc3g":
case "vivalto3g":
case "vivalto3mve3g":
return "Galaxy Ace4";
case "sltechn":
case "sltelgt":
case "slteskt":
case "sltektt":
case "sltecan":
case "slteatt":
case "slte":
return "Galaxy Alpha";
case "rossalte":
case "coreprimeltespr":
case "rossaltexsa":
case "coreprimelte":
case "coreprimeltevzw":
case "coreprimeve3g":
case "core33g":
case "cprimeltetmo":
case "coreprimelteaio":
case "cprimeltemtr":
case "rossaltectc":
case "coreprimeltetfnvzw":
case "coreprimevelte":
return "Galaxy Core Prime";
case "kanas3g":
case "kanas3gnfc":
case "kanas3gcmcc":
case "kanas":
case "kanas3gctc":
return "Galaxy Core2";
case "e5lte":
case "e53g":
case "e5ltetfnvzw":
case "e5ltetw":
return "Galaxy E5";
case "e7lte":
case "e7ltehktw":
case "e7ltechn":
case "e73g":
case "e7ltectc":
return "Galaxy E7";
case "nevis":
case "SCH-I629":
case "nevis3gcmcc":
case "nevisw":
case "nevis3g":
case "nevisp":
case "nevisnvess":
case "nevisvess":
case "nevisds":
return "Galaxy Fame";
case "baffinq3g":
case "baffinlitedtv":
case "baffinlite":
return "Galaxy Grand Neo";
case "gprimelteusc":
case "grandprimeveltezt":
case "grandprimelte":
case "fortunaltezh":
case "gprimeltetfnvzw":
case "fortunalte":
case "gprimeltecan":
case "fortunaltectc":
case "grandprimevelteltn":
case "gprimeltespr":
case "fortuna3gdtv":
case "gprimeltetmo":
case "fortuna3g":
case "grandprimeve3g":
case "fortunaltezt":
case "grandprimelteaio":
case "grandprimevelte":
case "grandprimeve3gdtv":
return "Galaxy Grand Prime";
case "ms013g":
case "ms01lte":
case "ms013gdtv":
case "ms01ltelgt":
case "ms013gss":
case "ms01lteskt":
case "ms01ltektt":
return "Galaxy Grand2";
case "toroplus":
case "maguro":
case "toro":
return "Galaxy Nexus";
case "p4noterf":
case "p4noteltektt":
case "p4notewifiww":
case "p4noterfktt":
case "lt03ltektt":
case "p4notewifi43241any":
case "p4noteltespr":
case "p4noteltevzw":
case "p4notelte":
case "p4noteltelgt":
case "lt03ltelgt":
case "p4notelteskt":
case "lt033g":
case "lt03lteskt":
case "p4notelteusc":
case "p4notewifi":
case "p4notewifiany":
case "p4notewifiktt":
return "Galaxy Note 10.1";
case "tbltechn":
case "tbltecan":
case "tblteatt":
case "tbeltelgt":
case "tbltevzw":
case "tbeltektt":
case "tbelteskt":
case "tbltespr":
case "tblteusc":
case "SCL24":
case "tblte":
case "tbltetmo":
return "Galaxy Note Edge";
case "v1awifi":
case "v1awifikx":
case "viennalte":
case "viennaltevzw":
case "v1a3g":
case "viennaltekx":
case "viennalteatt":
return "Galaxy Note Pro 12.2";
case "t0ltecmcc":
case "SC-02E":
case "t03gchn":
case "t0ltevzw":
case "t0ltetmo":
case "t03g":
case "t0ltespr":
case "t0ltelgt":
case "t0lteskt":
case "t03gcmcc":
case "t0lteatt":
case "t03gchnduos":
case "t0lteusc":
case "t03gctc":
case "t0lte":
case "t0ltektt":
case "t0ltedcm":
case "t03gcuduos":
case "t0ltecan":
return "Galaxy Note2";
case "SC-02F":
case "hltelgt":
case "hlteskt":
case "hltetmo":
case "hlte":
case "hltespr":
case "hlteatt":
case "htdlte":
case "SCL22":
case "hltektt":
case "hlteusc":
case "hltevzw":
case "ha3g":
case "hltecan":
return "Galaxy Note3";
case "hl3gds":
case "hllte":
case "frescoltektt":
case "hl3g":
case "frescolteskt":
case "frescoltelgt":
return "Galaxy Note3 Neo";
case "tre3caltelgt":
case "trltechn":
case "trlte":
case "tre3calteskt":
case "trlteusc":
case "tre3g":
case "treltektt":
case "trlteatt":
case "trltechnzh":
case "tre3caltektt":
case "treltelgt":
case "trltevzw":
case "trltespr":
case "trltetmo":
case "trelteskt":
case "trltecan":
case "trelte":
case "trhplte":
return "Galaxy Note4";
case "noblelteusc":
case "nobleltechn":
case "nobleltektt":
case "nobleltecmcc":
case "nobleltevzw":
case "noblelte":
case "nobleltetmo":
case "nobleltespr":
case "nobleltebmc":
case "nobleltehk":
case "nobleltelgt":
case "noblelteskt":
case "noblelteatt":
return "Galaxy Note5";
case "kyletdcmcc":
case "kylechn":
case "kyleichn":
case "kyleopen":
return "Galaxy S Duos";
case "kyleprods":
return "Galaxy S Duos2";
case "vivalto3gvn":
return "Galaxy S Duos3";
case "SHW-M250L":
case "GT-I9108":
case "SHW-M250K":
case "SCH-R760X":
case "SC-02C":
case "SGH-T989":
case "GT-I9100":
case "SHW-M250S":
case "GT-I9103":
case "GT-I9100M":
case "GT-I9100P":
case "GT-I9210T":
case "GT-I9100T":
case "SGH-I777":
case "t1cmcc":
case "SHV-E110S":
case "SGH-S959G":
return "Galaxy S2";
case "SC-03E":
case "d2xar":
case "d2mtr":
case "d2spi":
case "d2vmu":
case "d2att":
case "m0skt":
case "d2lterefreshspr":
case "d2tmo":
case "d2cri":
case "m0ctc":
case "d2dcm":
case "d2can":
case "c1lgt":
case "m0":
case "c1skt":
case "m3":
case "d2lteMetroPCS":
case "d2ltetmo":
case "d2usc":
case "m3dcm":
case "m0chn":
case "d2tfnvzw":
case "m0apt":
case "c1ktt":
case "d2tfnspr":
case "c1att":
case "d2vzw":
case "m0ctcduos":
case "d2spr":
case "m0cmcc":
return "Galaxy S3";
case "goldenltevzw":
case "goldenlteatt":
case "golden":
case "goldenve3g":
case "goldenltebmc":
return "Galaxy S3 Mini";
case "s3ve3g":
case "s3ve3gdsdd":
case "s3ve3gds":
case "s3ve3gdd":
return "Galaxy S3 Neo";
case "jalteskt":
case "jfltetmo":
case "ks01ltektt":
case "jfvelte":
case "jaltelgt":
case "jfltecri":
case "jfltelra":
case "jfltetfntmo":
case "jflterefreshspr":
case "jflteusc":
case "ks01ltelgt":
case "jflteaio":
case "jfltecan":
case "ks01lte":
case "jfltespr":
case "SC-04E":
case "jflte":
case "jfltevzw":
case "jfwifi":
case "jfltecsp":
case "jfltetfnatt":
case "jaltektt":
case "jflteatt":
case "jsglte":
case "ja3g":
case "ja3gduosctc":
case "jftdd":
return "Galaxy S4";
case "klte":
case "kltektt":
case "klteduoszn":
case "kltecan":
case "klteMetroPCS":
case "lentisltelgt":
case "lentislteskt":
case "klteacg":
case "klteusc":
case "SCL23":
case "klteaio":
case "kltelra":
case "klteskt":
case "lentisltektt":
case "klteatt":
case "kltevzw":
case "kltespr":
case "kwifi":
case "kltetmo":
case "k3g":
case "kltelgt":
return "Galaxy S5";
case "zerofltechn":
case "zeroflteaio":
case "zeroflte":
case "zerofltektt":
case "zerofltemtr":
case "zeroflteatt":
case "zeroflteusc":
case "zeroflteacg":
case "zerofltelgt":
case "zeroflteskt":
case "zerofltebmc":
case "zerofltelra":
case "zerofltectc":
case "zerofltevzw":
case "zerofltespr":
case "zerofltetmo":
case "zerofltetfnvzw":
return "Galaxy S6";
case "zeroltektt":
case "zeroltechn":
case "zerolteacg":
case "zerolteusc":
case "zeroltevzw":
case "zeroltelra":
case "zerolteskt":
case "zeroltelgt":
case "SCV31":
case "zeroltebmc":
case "zeroltetmo":
case "zeroltespr":
case "zerolte":
case "zerolteatt":
case "404SC":
return "Galaxy S6 Edge";
case "zenltebmc":
case "zenltechn":
case "zenltelgt":
case "zenltetmo":
case "zenltespr":
case "zenltevzw":
case "zenlte":
case "zenlteatt":
case "zenlteskt":
case "zenltektt":
return "Galaxy S6 Edge+";
case "SC-01D":
case "SHW-M380K":
case "GT-P7500":
case "SCH-I905":
case "GT-P7510":
case "SHW-M300W":
case "GT-P7503":
case "SGH-T859":
case "SHW-M380S":
case "GT-P7500D":
case "SHW-M380W":
return "Galaxy Tab 10.1";
case "GT-P6210":
case "SC-02D":
case "GT-P6201":
case "GT-P6200":
case "GT-P6211":
case "GT-P6200L":
case "SGH-T869":
case "SHW-M430W":
return "Galaxy Tab 7.0 Plus";
case "degaswifiue":
case "403SC":
case "degasltespr":
case "degasveltechn":
case "degaswifiopenbnn":
case "degasvelte":
case "degaswifibmwzc":
case "degaswifidtv":
case "degasltevzw":
case "degaswifi":
case "degas3g":
case "degaslte":
return "Galaxy Tab4 7.0";
case "GT-S5368":
case "SCH-i509":
case "GT-S5360T":
case "SCH-I509":
case "GT-S5369":
case "GT-S5360":
case "GT-S5360L":
case "GT-S5360B":
case "GT-S5363":
return "Galaxy Y";
case "m7cdug":
case "m7cdtu":
case "m7":
case "m7wlv":
case "m7cdwg":
case "m7wls":
return "HTC One";
case "htc_mecul":
case "htc_mectl":
case "htc_mecwhl":
case "htc_mecdwg":
return "HTC One (E8)";
case "htc_m8wl":
case "htc_m8dwg":
case "htc_m8dug":
case "htc_m8":
case "htc_m8whl":
return "HTC One (M8)";
case "htc_himauhl":
case "htc_himawhl":
case "htc_himaulatt":
case "htc_himaul":
case "htc_himawl":
return "HTC One M9";
case "villec2":
case "ville":
return "HTC One S";
case "hwH30-T10":
case "hwH30-U10":
case "hwhn3-u00":
case "hwhn3-u01":
return "Honor3";
case "acer_harleyfhd":
case "acer_harley":
return "Iconia Tab 10";
case "acer_apriliahd":
case "acer_aprilia":
return "Iconia Tab 7";
case "ducati2fhd":
case "ducati2hd":
case "ducati2hd3g":
return "Iconia Tab 8";
case "zee":
return "LG G Flex";
case "z2":
return "LG G Flex2";
case "g2":
return "LG G2";
case "g3":
return "LG G3";
case "p1":
return "LG G4";
case "c50ds":
case "c50n":
case "c50":
return "LG Leon 4G LTE";
case "cosmopolitan":
return "LG Optimus 3D";
case "geehdc":
case "geehrc":
case "geehrc4g":
case "geeb":
return "LG Optimus G";
case "geefhd4g":
case "geefhd":
return "LG Optimus G Pro";
case "u2":
return "LG Optimus L9";
case "a1":
return "Liquid";
case "acer_e3n":
case "acer_e3":
return "Liquid E3";
case "acer_S55":
return "Liquid Jade";
case "acer_S56":
return "Liquid Jade S";
case "s3":
return "Liquid S3";
case "acer_ZXL":
return "Liquid Z5";
case "surnia_uds":
case "condor_cdma":
case "condor_umts":
case "condor_umtsds":
case "condor_udstv":
case "otus":
case "otus_ds":
case "surnia_cdma":
case "surnia_umts":
case "surnia_udstv":
return "MOTO E";
case "osprey_udstv":
case "osprey_umts":
case "thea_umtsds":
case "falcon_umts":
case "thea":
case "titan_umtsds":
case "peregrine":
case "osprey_cdma":
case "titan_udstv":
case "titan_umts":
case "osprey_uds":
case "osprey_ud2":
case "falcon_umtsds":
case "thea_ds":
case "falcon_cdma":
case "osprey_u2":
return "MOTO G";
case "ghost":
case "victara":
return "MOTO X";
case "K013":
case "K01A":
case "K017":
case "K013C":
case "K01U_2":
case "K01U_1":
case "K013_1":
return "MeMO Pad 7";
case "clark":
return "Moto X Style";
case "manta":
return "Nexus 10";
case "hammerhead":
return "Nexus 5";
case "shamu":
return "Nexus 6";
case "grouper":
case "tilapia":
return "Nexus 7 (2012)";
case "deb":
case "flo":
return "Nexus 7 (2013)";
case "flounder":
return "Nexus 9";
case "A0001":
return "OnePlus One";
case "p990":
case "p990_CIS-xxx":
case "star":
case "star_450-05":
case "su660":
case "p990_EUR-xx":
case "p990hN":
case "p999":
case "p990_262-xx":
return "Optimus 2X";
case "cosmo_450-05":
case "p920":
case "cosmo_EUR-XXX":
case "su760":
case "cosmo_MEA-XXX":
return "Optimus 3D";
case "cx2":
return "Optimus 3D MAX";
case "bproj_214-03":
case "bproj_EUR-XXX":
case "bproj_ARE-XXX":
case "black":
case "LGL85C":
case "bproj_302-220":
case "ku5900":
case "bproj_262-XXX":
case "blackg":
case "bproj_sea-xxx":
case "bproj_724-xxx":
case "lgp970":
case "bproj_334-020":
return "Optimus Black";
case "m4":
return "Optimus L5";
case "i_skt":
case "iproj":
case "lgp930":
case "i_dcm":
case "lgp935":
case "i_u":
return "Optimus LTE";
case "su370":
case "ku3700":
case "thunder_kor-05":
case "lu3700":
case "thunderc":
case "thunder_kor-08":
return "Optimus One";
case "l06c":
case "v901ar":
case "v905r":
case "v900":
case "v900asia":
case "v901tr":
case "v909mkt":
case "v909":
case "v901kr":
return "Optimus Pad";
case "thunderbird":
case "LW":
case "Venue7":
return "Venue 7";
case "BB":
case "yellowtail":
case "Venue8":
return "Venue 8";
case "wifi_hubble":
case "umts_hubble":
case "umts_everest":
case "stingray":
case "wingray":
return "XOOM";
case "D2203":
case "D2202":
case "D2243":
case "D2206":
return "Xperia E3";
case "E5653":
case "E5606":
case "E5603":
return "Xperia M5";
case "E5643":
case "E5633":
case "E5663":
return "Xperia M5 Dual";
case "SO-02D":
case "LT26i":
return "Xperia S";
case "D5316":
case "D5303":
case "D5322":
case "D5316N":
case "D5306":
return "Xperia T2 Ultra";
case "txs03":
return "Xperia Tablet S";
case "SO-03E":
case "SGP312":
case "SGP311":
case "SGP321":
case "SGP341":
case "SGP351":
return "Xperia Tablet Z";
case "D6503":
case "D6502":
case "SO-03F":
case "D6543":
return "Xperia Z2";
case "D6603":
case "401SO":
case "SOL26":
case "D6643":
case "D6653":
case "SO-01G":
case "leo":
case "D6616":
return "Xperia Z3";
case "402SO":
case "SO-03G":
case "SOV31":
return "Xperia Z4";
case "E5823":
case "E5803":
return "Xperia Z5 Compact";
case "ASUS_Z00RD_7":
case "ASUS_Z00RD_5":
return "ZenFone 2";
case "ASUS_T00I":
case "ASUS_T00Q":
return "ZenFone 4";
case "ASUS_T00F1":
case "ASUS_T00K":
case "ASUS_T00J":
case "ASUS_T00F":
case "ASUS_T00J1":
return "ZenFone 5";
default:
return fallback;
}
}
/**
 * <p>
 * Capitalizes all the whitespace separated words in a String. Only the first letter
 * of each word is changed.
 * </p>
 * Whitespace is defined by {@link Character#isWhitespace(char)}.
 *
 * @param str
 *     the String to capitalize; may be {@code null} or empty.
 * @return the capitalized String, or {@code str} unchanged if it is {@code null} or empty.
 */
private static String capitalize(String str) {
  // Equivalent to TextUtils.isEmpty(str) but keeps this utility framework-independent.
  if (str == null || str.length() == 0) {
    return str;
  }
  // Build into a StringBuilder; repeated String concatenation in the loop was O(n^2).
  StringBuilder sb = new StringBuilder(str.length());
  boolean capitalizeNext = true;
  for (char c : str.toCharArray()) {
    if (capitalizeNext && Character.isLetter(c)) {
      sb.append(Character.toUpperCase(c));
      capitalizeNext = false;
      continue; // the upper-cased char is already appended; skip the plain append below
    } else if (Character.isWhitespace(c)) {
      capitalizeNext = true;
    }
    sb.append(c);
  }
  return sb.toString();
}
/**
 * Downloads the content at the given URL and returns it as a String.
 *
 * <p>Must not be run on the UI thread (performs blocking network I/O).</p>
 *
 * @param myurl the URL to download
 * @return the response body (decoded as UTF-8, lines joined with '\n'),
 *     or an empty string if the server did not answer with HTTP 200.
 * @throws IOException if the connection fails or the stream cannot be read
 */
private static String downloadJson(String myurl) throws IOException {
  StringBuilder sb = new StringBuilder();
  BufferedReader reader = null;
  HttpURLConnection conn = null;
  try {
    URL url = new URL(myurl);
    conn = (HttpURLConnection) url.openConnection();
    conn.setReadTimeout(10000);
    conn.setConnectTimeout(15000);
    conn.setRequestMethod("GET");
    conn.setDoInput(true);
    conn.connect();
    if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
      // The JSON files are UTF-8; decode explicitly instead of relying on the
      // platform default charset. The String-charset overload throws an
      // UnsupportedEncodingException, which is an IOException — signature unchanged.
      reader = new BufferedReader(new InputStreamReader(conn.getInputStream(), "UTF-8"));
      String line;
      while ((line = reader.readLine()) != null) {
        // append(line).append('\n') avoids allocating a temporary "line + \n" string per line
        sb.append(line).append('\n');
      }
    }
    return sb.toString();
  } finally {
    if (reader != null) {
      reader.close();
    }
    if (conn != null) {
      // Release the underlying connection; the original leaked it.
      conn.disconnect();
    }
  }
}
/**
 * Get the device name from the generated JSON files created from Google's device list.
 *
 * <p>Resolution order: (1) a cached JSON entry in SharedPreferences, (2) a network
 * download of the per-codename JSON file, (3) the current build's values when the
 * requested codename/model match this device, (4) a DeviceInfo with {@code null}
 * manufacturer/market name. Performs blocking network I/O; callers run it off the
 * UI thread (see {@link Request#request(Callback)}).</p>
 */
private static DeviceInfo getDeviceInfo(Context context, String codename, String model) {
  SharedPreferences prefs = context.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE);
  // Cache key combines codename and model, matching the save below.
  String key = String.format("%s:%s", codename, model);
  String savedJson = prefs.getString(key, null);
  if (savedJson != null) {
    try {
      return new DeviceInfo(new JSONObject(savedJson));
    } catch (JSONException e) {
      // Corrupt cache entry: fall through and try the network instead.
      e.printStackTrace();
    }
  }
  // check if we have an internet connection
  int ret = context.checkCallingOrSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE);
  boolean isConnectedToNetwork = false;
  if (ret == PackageManager.PERMISSION_GRANTED) {
    ConnectivityManager connMgr = (ConnectivityManager)
        context.getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
    if (networkInfo != null && networkInfo.isConnected()) {
      isConnectedToNetwork = true;
    }
  } else {
    // assume we are connected.
    isConnectedToNetwork = true;
  }
  if (isConnectedToNetwork) {
    try {
      String url = String.format(DEVICE_JSON_URL, codename);
      String jsonString = downloadJson(url);
      // The downloaded file is a JSON array of device entries for this codename.
      JSONArray jsonArray = new JSONArray(jsonString);
      for (int i = 0, len = jsonArray.length(); i < len; i++) {
        JSONObject json = jsonArray.getJSONObject(i);
        DeviceInfo info = new DeviceInfo(json);
        if (codename.equals(info.codename) && model.equals(info.model)) {
          // Save to SharedPreferences so we don't need to make another request.
          prefs.edit().putString(key, json.toString()).commit();
          return info;
        }
      }
    } catch (Exception e) {
      // Best-effort: any network/parse failure falls through to the build-value fallback.
      e.printStackTrace();
    }
  }
  if (codename.equals(Build.DEVICE) && model.equals(Build.MODEL)) {
    // current device
    return new DeviceInfo(
        Build.MANUFACTURER,
        getDeviceName(),
        codename,
        model);
  }
  // unknown device
  return new DeviceInfo(
      null,
      null,
      codename,
      model);
}
/**
 * Builder-style request that resolves a device's market name off the UI thread
 * and delivers the result back on the main thread.
 * Obtain an instance via {@link DeviceName#with(Context)}.
 */
public static final class Request {

  // Application context, used for SharedPreferences and connectivity checks.
  private final Context context;
  // Bound to the main looper; used to post the callback onto the UI thread.
  private final Handler handler;
  // Codename to look up; defaults to the current device's Build.DEVICE.
  private String codename;
  // Model to look up; defaults to the current device's Build.MODEL.
  private String model;

  private Request(Context ctx) {
    context = ctx;
    handler = new Handler(ctx.getMainLooper());
    codename = Build.DEVICE;
    model = Build.MODEL;
  }

  /**
   * Set the device codename to query. You should also set the model.
   *
   * @param codename
   *     the value of the system property "ro.product.device"
   * @return This Request object to allow for chaining of calls to set methods.
   * @see Build#DEVICE
   */
  public Request setCodename(String codename) {
    this.codename = codename;
    return this;
  }

  /**
   * Set the device model to query. You should also set the codename.
   *
   * @param model
   *     the value of the system property "ro.product.model"
   * @return This Request object to allow for chaining of calls to set methods.
   * @see Build#MODEL
   */
  public Request setModel(String model) {
    this.model = model;
    return this;
  }

  /**
   * Download information about the device. This saves the results in shared-preferences so
   * future requests will not need a network connection.
   *
   * <p>May be called from any thread: when invoked on the main thread the work runs on a
   * new background thread; otherwise it runs inline. The callback is always posted to the
   * main thread.</p>
   *
   * @param callback
   *     the callback to retrieve the {@link DeviceName.DeviceInfo}
   */
  public void request(Callback callback) {
    if (Looper.myLooper() == Looper.getMainLooper()) {
      new Thread(runnable(callback)).start();
    } else {
      runnable(callback).run(); // already running in background thread.
    }
  }

  // Performs the blocking lookup, then posts the result (or the caught exception)
  // back to the main thread via the handler. If an exception was caught,
  // deviceInfo stays null.
  private Runnable runnable(final Callback callback) {
    return new Runnable() {

      DeviceInfo deviceInfo;
      Exception error;

      @Override
      public void run() {
        try {
          deviceInfo = getDeviceInfo(context, codename, model);
        } catch (Exception e) {
          error = e;
        }
        handler.post(new Runnable() {

          @Override
          public void run() {
            callback.onFinished(deviceInfo, error);
          }
        });
      }
    };
  }
}
/**
 * Callback which is invoked when the {@link DeviceName.DeviceInfo} is finished loading.
 */
public interface Callback {

  /**
   * Callback to get the device info. This is run on the UI thread.
   * Invoked exactly once per {@link Request#request(Callback)} call.
   *
   * @param info
   *     the requested {@link DeviceName.DeviceInfo}; {@code null} when {@code error} is
   *     non-{@code null}.
   * @param error
   *     {@code null} if nothing went wrong; otherwise the exception thrown during lookup.
   */
  void onFinished(DeviceInfo info, Exception error);
}
/**
 * Device information based on
 * <a href="https://support.google.com/googleplay/answer/1727131">Google's maintained list</a>.
 */
public static final class DeviceInfo {

  /** Retail branding */
  public final String manufacturer;

  /** Marketing name */
  public final String marketName;

  /** the value of the system property "ro.product.device" */
  public final String codename;

  /** the value of the system property "ro.product.model" */
  public final String model;

  /**
   * Creates a DeviceInfo from explicit values.
   *
   * @param manufacturer retail branding; may be {@code null} for unknown devices
   * @param marketName marketing (consumer friendly) name; may be {@code null}
   * @param codename the value of "ro.product.device"
   * @param model the value of "ro.product.model"
   */
  public DeviceInfo(String manufacturer, String marketName, String codename, String model) {
    this.manufacturer = manufacturer;
    this.marketName = marketName;
    this.codename = codename;
    this.model = model;
  }

  // Builds a DeviceInfo from one entry of the downloaded JSON device list;
  // all four fields are required keys in the JSON object.
  private DeviceInfo(JSONObject jsonObject) throws JSONException {
    manufacturer = jsonObject.getString("manufacturer");
    marketName = jsonObject.getString("market_name");
    codename = jsonObject.getString("codename");
    model = jsonObject.getString("model");
  }

  /**
   * @return the consumer friendly name of the device: the market name when present,
   *     otherwise the capitalized model.
   */
  public String getName() {
    if (!TextUtils.isEmpty(marketName)) {
      return marketName;
    }
    return capitalize(model);
  }
}
}
|
library/src/main/java/com/jaredrummler/android/device/DeviceName.java
|
/*
* Copyright (C) 2015. Jared Rummler <me@jaredrummler.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.jaredrummler.android.device;
import android.Manifest;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* <p>On many Android devices, the best way to display the device name to the user is by getting
* the
* value of {@link android.os.Build#MODEL}. Unfortunately, on many popular devices, this value is
* not consumer friendly. For example, on the Samsung Galaxy S6 the value of {@link Build#MODEL}
* could be "SM-G920F", "SM-G920I", "SM-G920W8", etc.</p>
*
* <p>To get the market (consumer friendly) name of a device you can use one (or both) of the
* following examples:</p>
*
* <b>Example 1:</b>
*
* <br>
* <p>{@code String deviceName = DeviceName.getDeviceName();}</p>
* <b>Example 2:</b>
* <br>
* <pre>
* <code>
* DeviceName.with(context).request(new DeviceName.Callback() {
* {@literal @}Override
* public void onFinished(DeviceName.DeviceInfo info, Exception error) {
* String deviceName;
* if (error != null) {
* deviceName = info.getName();
* } else {
* deviceName = DeviceName.getDeviceName();
* }
* }
* });
* </code>
* </pre>
* <p>
* <b>Example 1:</b> contains over 600 popular Android devices and can be run on the UI thread. If
* the current device is not in the list then {@link Build#MODEL} will be returned as a fallback.
* </p>
* <p>
* <b>Example 2:</b> loads JSON from a generated list of device names based on Google's maintained
* list and contains around 10,000 devices. This needs a network connection and is run in a
* background thread.
* </p>
*
* @author Jared Rummler
*/
public class DeviceName {
// JSON which is derived from Google's PDF document which contains all devices on Google Play
// To get the URL to the JSON file which contains information about the device name:
// String url = String.format(DEVICE_JSON_URL, Build.DEVICE);
private static final String DEVICE_JSON_URL =
"https://raw.githubusercontent.com/jaredrummler/android-devices/master/json/devices/%s.json";
// Preference filename for storing device info so we don't need to download it again.
private static final String SHARED_PREF_NAME = "device_names";
/**
* Create a new request to get information about a device.
*
* @param context
* the application context
* @return a new Request instance.
*/
public static Request with(Context context) {
return new Request(context.getApplicationContext());
}
/**
* Get the consumer friendly name of the device.
*
* @return the market name of the current device.
* @see #getDeviceName(String, String)
*/
public static String getDeviceName() {
String manufacturer = Build.MANUFACTURER;
String model = Build.MODEL;
String fallback;
if (model.startsWith(manufacturer)) {
fallback = capitalize(model);
} else {
fallback = capitalize(manufacturer) + " " + model;
}
return getDeviceName(Build.DEVICE, fallback);
}
/**
* Get the consumer friendly name of a device.
*
* @param codename
* the value of the system property "ro.product.device" ({@link Build#DEVICE}).
* @param fallback
* the fallback name if the device is unknown. Usually the value of the system property
* "ro.product.model" ({@link Build#MODEL})
* @return the market name of a device or {@code fallback} if the device is unknown.
*/
public static String getDeviceName(String codename, String fallback) {
switch (codename) {
case "quark":
return "DROID Turbo";
case "K01N_2":
case "K01N_1":
case "K012":
case "K00Z":
return "Fonepad 7";
case "a3ltedd":
case "a3ulte":
case "a3ltezh":
case "a3ltechn":
case "a3lte":
case "a33g":
case "a3ltectc":
case "a3lteslk":
case "a3ltezt":
return "Galaxy A3";
case "a5ltectc":
case "a5ltezh":
case "a5ulteskt":
case "a53g":
case "a5ltezt":
case "a5lte":
case "a5ltechn":
case "a5ulte":
case "a5ultektt":
case "a5ultelgt":
return "Galaxy A5";
case "a8elte":
case "a8elteskt":
case "a8ltechn":
return "Galaxy A8";
case "vivaltods5m":
return "Galaxy Ace 4";
case "GT-S6802B":
case "GT-S6802":
case "SCH-i589":
case "SCH-i579":
case "GT-S6352":
case "SCH-I589":
case "SCH-I579":
return "Galaxy Ace Duos";
case "GT-S7500L":
case "GT-S7500":
case "GT-S7500T":
case "GT-S7508":
case "GT-S7500W":
return "Galaxy Ace Plus";
case "heatqlte":
case "heat3gtfnvzw":
case "heatnfc3g":
return "Galaxy Ace Style";
case "vivaltolte":
case "vivalto5mve3g":
case "vivaltonfc3g":
case "vivalto3g":
case "vivalto3mve3g":
return "Galaxy Ace4";
case "sltechn":
case "sltelgt":
case "slteskt":
case "sltektt":
case "sltecan":
case "slteatt":
case "slte":
return "Galaxy Alpha";
case "rossalte":
case "coreprimeltespr":
case "rossaltexsa":
case "coreprimelte":
case "coreprimeltevzw":
case "coreprimeve3g":
case "core33g":
case "cprimeltetmo":
case "coreprimelteaio":
case "cprimeltemtr":
case "rossaltectc":
case "coreprimeltetfnvzw":
case "coreprimevelte":
return "Galaxy Core Prime";
case "kanas3g":
case "kanas3gnfc":
case "kanas3gcmcc":
case "kanas":
case "kanas3gctc":
return "Galaxy Core2";
case "e5lte":
case "e53g":
case "e5ltetfnvzw":
case "e5ltetw":
return "Galaxy E5";
case "e7lte":
case "e7ltehktw":
case "e7ltechn":
case "e73g":
case "e7ltectc":
return "Galaxy E7";
case "nevis":
case "SCH-I629":
case "nevis3gcmcc":
case "nevisw":
case "nevis3g":
case "nevisp":
case "nevisnvess":
case "nevisvess":
case "nevisds":
return "Galaxy Fame";
case "baffinq3g":
case "baffinlitedtv":
case "baffinlite":
return "Galaxy Grand Neo";
case "gprimelteusc":
case "grandprimeveltezt":
case "grandprimelte":
case "fortunaltezh":
case "gprimeltetfnvzw":
case "fortunalte":
case "gprimeltecan":
case "fortunaltectc":
case "grandprimevelteltn":
case "gprimeltespr":
case "fortuna3gdtv":
case "gprimeltetmo":
case "fortuna3g":
case "grandprimeve3g":
case "fortunaltezt":
case "grandprimelteaio":
case "grandprimevelte":
case "grandprimeve3gdtv":
return "Galaxy Grand Prime";
case "ms013g":
case "ms01lte":
case "ms013gdtv":
case "ms01ltelgt":
case "ms013gss":
case "ms01lteskt":
case "ms01ltektt":
return "Galaxy Grand2";
case "toroplus":
case "maguro":
case "toro":
return "Galaxy Nexus";
case "p4noterf":
case "p4noteltektt":
case "p4notewifiww":
case "p4noterfktt":
case "lt03ltektt":
case "p4notewifi43241any":
case "p4noteltespr":
case "p4noteltevzw":
case "p4notelte":
case "p4noteltelgt":
case "lt03ltelgt":
case "p4notelteskt":
case "lt033g":
case "lt03lteskt":
case "p4notelteusc":
case "p4notewifi":
case "p4notewifiany":
case "p4notewifiktt":
return "Galaxy Note 10.1";
case "tbltechn":
case "tbltecan":
case "tblteatt":
case "tbeltelgt":
case "tbltevzw":
case "tbeltektt":
case "tbelteskt":
case "tbltespr":
case "tblteusc":
case "SCL24":
case "tblte":
case "tbltetmo":
return "Galaxy Note Edge";
case "v1awifi":
case "v1awifikx":
case "viennalte":
case "viennaltevzw":
case "v1a3g":
case "viennaltekx":
case "viennalteatt":
return "Galaxy Note Pro 12.2";
case "t0ltecmcc":
case "SC-02E":
case "t03gchn":
case "t0ltevzw":
case "t0ltetmo":
case "t03g":
case "t0ltespr":
case "t0ltelgt":
case "t0lteskt":
case "t03gcmcc":
case "t0lteatt":
case "t03gchnduos":
case "t0lteusc":
case "t03gctc":
case "t0lte":
case "t0ltektt":
case "t0ltedcm":
case "t03gcuduos":
case "t0ltecan":
return "Galaxy Note2";
case "SC-02F":
case "hltelgt":
case "hlteskt":
case "hltetmo":
case "hlte":
case "hltespr":
case "hlteatt":
case "htdlte":
case "SCL22":
case "hltektt":
case "hlteusc":
case "hltevzw":
case "ha3g":
case "hltecan":
return "Galaxy Note3";
case "hl3gds":
case "hllte":
case "frescoltektt":
case "hl3g":
case "frescolteskt":
case "frescoltelgt":
return "Galaxy Note3 Neo";
case "tre3caltelgt":
case "trltechn":
case "trlte":
case "tre3calteskt":
case "trlteusc":
case "tre3g":
case "treltektt":
case "trlteatt":
case "trltechnzh":
case "tre3caltektt":
case "treltelgt":
case "trltevzw":
case "trltespr":
case "trltetmo":
case "trelteskt":
case "trltecan":
case "trelte":
case "trhplte":
return "Galaxy Note4";
case "noblelteusc":
case "nobleltechn":
case "nobleltektt":
case "nobleltecmcc":
case "nobleltevzw":
case "noblelte":
case "nobleltetmo":
case "nobleltespr":
case "nobleltebmc":
case "nobleltehk":
case "nobleltelgt":
case "noblelteskt":
case "noblelteatt":
return "Galaxy Note5";
case "kyletdcmcc":
case "kylechn":
case "kyleichn":
case "kyleopen":
return "Galaxy S Duos";
case "kyleprods":
return "Galaxy S Duos2";
case "vivalto3gvn":
return "Galaxy S Duos3";
case "SHW-M250L":
case "GT-I9108":
case "SHW-M250K":
case "SCH-R760X":
case "SC-02C":
case "SGH-T989":
case "GT-I9100":
case "SHW-M250S":
case "GT-I9103":
case "GT-I9100M":
case "GT-I9100P":
case "GT-I9210T":
case "GT-I9100T":
case "SGH-I777":
case "t1cmcc":
case "SHV-E110S":
case "SGH-S959G":
return "Galaxy S2";
case "SC-03E":
case "d2xar":
case "d2mtr":
case "d2spi":
case "d2vmu":
case "d2att":
case "m0skt":
case "d2lterefreshspr":
case "d2tmo":
case "d2cri":
case "m0ctc":
case "d2dcm":
case "d2can":
case "c1lgt":
case "m0":
case "c1skt":
case "m3":
case "d2lteMetroPCS":
case "d2ltetmo":
case "d2usc":
case "m3dcm":
case "m0chn":
case "d2tfnvzw":
case "m0apt":
case "c1ktt":
case "d2tfnspr":
case "c1att":
case "d2vzw":
case "m0ctcduos":
case "d2spr":
case "m0cmcc":
return "Galaxy S3";
case "goldenltevzw":
case "goldenlteatt":
case "golden":
case "goldenve3g":
case "goldenltebmc":
return "Galaxy S3 Mini";
case "s3ve3g":
case "s3ve3gdsdd":
case "s3ve3gds":
case "s3ve3gdd":
return "Galaxy S3 Neo";
case "jalteskt":
case "jfltetmo":
case "ks01ltektt":
case "jfvelte":
case "jaltelgt":
case "jfltecri":
case "jfltelra":
case "jfltetfntmo":
case "jflterefreshspr":
case "jflteusc":
case "ks01ltelgt":
case "jflteaio":
case "jfltecan":
case "ks01lte":
case "jfltespr":
case "SC-04E":
case "jflte":
case "jfltevzw":
case "jfwifi":
case "jfltecsp":
case "jfltetfnatt":
case "jaltektt":
case "jflteatt":
case "jsglte":
case "ja3g":
case "ja3gduosctc":
case "jftdd":
return "Galaxy S4";
case "klte":
case "kltektt":
case "klteduoszn":
case "kltecan":
case "klteMetroPCS":
case "lentisltelgt":
case "lentislteskt":
case "klteacg":
case "klteusc":
case "SCL23":
case "klteaio":
case "kltelra":
case "klteskt":
case "lentisltektt":
case "klteatt":
case "kltevzw":
case "kltespr":
case "kwifi":
case "kltetmo":
case "k3g":
case "kltelgt":
return "Galaxy S5";
case "zerofltechn":
case "zeroflteaio":
case "zeroflte":
case "zerofltektt":
case "zerofltemtr":
case "zeroflteatt":
case "zeroflteusc":
case "zeroflteacg":
case "zerofltelgt":
case "zeroflteskt":
case "zerofltebmc":
case "zerofltelra":
case "zerofltectc":
case "zerofltevzw":
case "zerofltespr":
case "zerofltetmo":
case "zerofltetfnvzw":
return "Galaxy S6";
case "zeroltektt":
case "zeroltechn":
case "zerolteacg":
case "zerolteusc":
case "zeroltevzw":
case "zeroltelra":
case "zerolteskt":
case "zeroltelgt":
case "SCV31":
case "zeroltebmc":
case "zeroltetmo":
case "zeroltespr":
case "zerolte":
case "zerolteatt":
case "404SC":
return "Galaxy S6 Edge";
case "zenltebmc":
case "zenltechn":
case "zenltelgt":
case "zenltetmo":
case "zenltespr":
case "zenltevzw":
case "zenlte":
case "zenlteatt":
case "zenlteskt":
case "zenltektt":
return "Galaxy S6 Edge+";
case "SC-01D":
case "SHW-M380K":
case "GT-P7500":
case "SCH-I905":
case "GT-P7510":
case "SHW-M300W":
case "GT-P7503":
case "SGH-T859":
case "SHW-M380S":
case "GT-P7500D":
case "SHW-M380W":
return "Galaxy Tab 10.1";
case "GT-P6210":
case "SC-02D":
case "GT-P6201":
case "GT-P6200":
case "GT-P6211":
case "GT-P6200L":
case "SGH-T869":
case "SHW-M430W":
return "Galaxy Tab 7.0 Plus";
case "degaswifiue":
case "403SC":
case "degasltespr":
case "degasveltechn":
case "degaswifiopenbnn":
case "degasvelte":
case "degaswifibmwzc":
case "degaswifidtv":
case "degasltevzw":
case "degaswifi":
case "degas3g":
case "degaslte":
return "Galaxy Tab4 7.0";
case "GT-S5368":
case "SCH-i509":
case "GT-S5360T":
case "SCH-I509":
case "GT-S5369":
case "GT-S5360":
case "GT-S5360L":
case "GT-S5360B":
case "GT-S5363":
return "Galaxy Y";
case "m7cdug":
case "m7cdtu":
case "m7":
case "m7wlv":
case "m7cdwg":
case "m7wls":
return "HTC One";
case "htc_mecul":
case "htc_mectl":
case "htc_mecwhl":
case "htc_mecdwg":
return "HTC One (E8)";
case "htc_m8wl":
case "htc_m8dwg":
case "htc_m8dug":
case "htc_m8":
case "htc_m8whl":
return "HTC One (M8)";
case "htc_himauhl":
case "htc_himawhl":
case "htc_himaulatt":
case "htc_himaul":
case "htc_himawl":
return "HTC One M9";
case "villec2":
case "ville":
return "HTC One S";
case "hwH30-T10":
case "hwH30-U10":
case "hwhn3-u00":
case "hwhn3-u01":
return "Honor3";
case "acer_harleyfhd":
case "acer_harley":
return "Iconia Tab 10";
case "acer_apriliahd":
case "acer_aprilia":
return "Iconia Tab 7";
case "ducati2fhd":
case "ducati2hd":
case "ducati2hd3g":
return "Iconia Tab 8";
case "zee":
return "LG G Flex";
case "z2":
return "LG G Flex2";
case "g2":
return "LG G2";
case "g3":
return "LG G3";
case "p1":
return "LG G4";
case "c50ds":
case "c50n":
case "c50":
return "LG Leon 4G LTE";
case "cosmopolitan":
return "LG Optimus 3D";
case "geehdc":
case "geehrc":
case "geehrc4g":
case "geeb":
return "LG Optimus G";
case "geefhd4g":
case "geefhd":
return "LG Optimus G Pro";
case "u2":
return "LG Optimus L9";
case "a1":
return "Liquid";
case "acer_e3n":
case "acer_e3":
return "Liquid E3";
case "acer_S55":
return "Liquid Jade";
case "acer_S56":
return "Liquid Jade S";
case "s3":
return "Liquid S3";
case "acer_ZXL":
return "Liquid Z5";
case "surnia_uds":
case "condor_cdma":
case "condor_umts":
case "condor_umtsds":
case "condor_udstv":
case "otus":
case "otus_ds":
case "surnia_cdma":
case "surnia_umts":
case "surnia_udstv":
return "MOTO E";
case "osprey_udstv":
case "osprey_umts":
case "thea_umtsds":
case "falcon_umts":
case "thea":
case "titan_umtsds":
case "peregrine":
case "osprey_cdma":
case "titan_udstv":
case "titan_umts":
case "osprey_uds":
case "osprey_ud2":
case "falcon_umtsds":
case "thea_ds":
case "falcon_cdma":
case "osprey_u2":
return "MOTO G";
case "ghost":
case "victara":
return "MOTO X";
case "K013":
case "K01A":
case "K017":
case "K013C":
case "K01U_2":
case "K01U_1":
case "K013_1":
return "MeMO Pad 7";
case "clark":
return "Moto X Style";
case "manta":
return "Nexus 10";
case "hammerhead":
return "Nexus 5";
case "shamu":
return "Nexus 6";
case "grouper":
case "tilapia":
return "Nexus 7 (2012)";
case "deb":
case "flo":
return "Nexus 7 (2013)";
case "flounder":
return "Nexus 9";
case "A0001":
return "OnePlus One";
case "p990":
case "p990_CIS-xxx":
case "star":
case "star_450-05":
case "su660":
case "p990_EUR-xx":
case "p990hN":
case "p999":
case "p990_262-xx":
return "Optimus 2X";
case "cosmo_450-05":
case "p920":
case "cosmo_EUR-XXX":
case "su760":
case "cosmo_MEA-XXX":
return "Optimus 3D";
case "cx2":
return "Optimus 3D MAX";
case "bproj_214-03":
case "bproj_EUR-XXX":
case "bproj_ARE-XXX":
case "black":
case "LGL85C":
case "bproj_302-220":
case "ku5900":
case "bproj_262-XXX":
case "blackg":
case "bproj_sea-xxx":
case "bproj_724-xxx":
case "lgp970":
case "bproj_334-020":
return "Optimus Black";
case "m4":
return "Optimus L5";
case "i_skt":
case "iproj":
case "lgp930":
case "i_dcm":
case "lgp935":
case "i_u":
return "Optimus LTE";
case "su370":
case "ku3700":
case "thunder_kor-05":
case "lu3700":
case "thunderc":
case "thunder_kor-08":
return "Optimus One";
case "l06c":
case "v901ar":
case "v905r":
case "v900":
case "v900asia":
case "v901tr":
case "v909mkt":
case "v909":
case "v901kr":
return "Optimus Pad";
case "thunderbird":
case "LW":
case "Venue7":
return "Venue 7";
case "BB":
case "yellowtail":
case "Venue8":
return "Venue 8";
case "wifi_hubble":
case "umts_hubble":
case "umts_everest":
case "stingray":
case "wingray":
return "XOOM";
case "D2203":
case "D2202":
case "D2243":
case "D2206":
return "Xperia E3";
case "E5653":
case "E5606":
case "E5603":
return "Xperia M5";
case "E5643":
case "E5633":
case "E5663":
return "Xperia M5 Dual";
case "SO-02D":
case "LT26i":
return "Xperia S";
case "D5316":
case "D5303":
case "D5322":
case "D5316N":
case "D5306":
return "Xperia T2 Ultra";
case "txs03":
return "Xperia Tablet S";
case "SO-03E":
case "SGP312":
case "SGP311":
case "SGP321":
case "SGP341":
case "SGP351":
return "Xperia Tablet Z";
case "D6503":
case "D6502":
case "SO-03F":
case "D6543":
return "Xperia Z2";
case "D6603":
case "401SO":
case "SOL26":
case "D6643":
case "D6653":
case "SO-01G":
case "leo":
case "D6616":
return "Xperia Z3";
case "402SO":
case "SO-03G":
case "SOV31":
return "Xperia Z4";
case "E5823":
case "E5803":
return "Xperia Z5 Compact";
case "ASUS_Z00RD_7":
case "ASUS_Z00RD_5":
return "ZenFone 2";
case "ASUS_T00I":
case "ASUS_T00Q":
return "ZenFone 4";
case "ASUS_T00F1":
case "ASUS_T00K":
case "ASUS_T00J":
case "ASUS_T00F":
case "ASUS_T00J1":
return "ZenFone 5";
default:
return fallback;
}
}
/**
 * <p>
 * Capitalizes all the whitespace separated words in a String. Only the first letter
 * of each word is changed.
 * </p>
 * Whitespace is defined by {@link Character#isWhitespace(char)}.
 *
 * @param str
 *     the String to capitalize
 * @return The capitalized String, or {@code str} unchanged if it is null or empty
 */
private static String capitalize(String str) {
  if (TextUtils.isEmpty(str)) {
    return str;
  }
  // Build into a StringBuilder; the previous repeated String concatenation
  // allocated a new String per character (O(n^2) for long inputs).
  StringBuilder phrase = new StringBuilder(str.length());
  boolean capitalizeNext = true;
  for (char c : str.toCharArray()) {
    if (capitalizeNext && Character.isLetter(c)) {
      phrase.append(Character.toUpperCase(c));
      capitalizeNext = false;
      continue;
    } else if (Character.isWhitespace(c)) {
      // Next letter starts a new word.
      capitalizeNext = true;
    }
    phrase.append(c);
  }
  return phrase.toString();
}
/**
 * Downloads the content at the given URL and returns it as a String.
 *
 * @param myurl
 *     the URL to fetch via HTTP GET
 * @return the response body with lines joined by '\n', or an empty String when the
 *     server does not answer with HTTP 200 (kept for backward compatibility)
 * @throws IOException
 *     if the connection or read fails
 */
private static String downloadJson(String myurl) throws IOException {
  StringBuilder sb = new StringBuilder();
  BufferedReader reader = null;
  try {
    URL url = new URL(myurl);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setReadTimeout(10000);    // ms
    conn.setConnectTimeout(15000); // ms
    conn.setRequestMethod("GET");
    conn.setDoInput(true);
    conn.connect();
    if (conn.getResponseCode() == HttpURLConnection.HTTP_OK) {
      // NOTE(review): reader uses the platform default charset — presumably the
      // endpoint serves UTF-8; confirm before forcing a charset here.
      reader = new BufferedReader(new InputStreamReader(conn.getInputStream()));
      String line;
      while ((line = reader.readLine()) != null) {
        // Chained appends avoid building an intermediate "line + \n" String per line.
        sb.append(line).append('\n');
      }
    }
    return sb.toString();
  } finally {
    if (reader != null) {
      reader.close();
    }
  }
}
/**
 * Get the device name from the generated JSON files created from Google's device list.
 * Resolution order: cached SharedPreferences entry -> downloaded device list -> local
 * {@link Build} values (when querying the current device) -> unknown DeviceInfo that
 * carries only the codename and model.
 */
private static DeviceInfo getDeviceInfo(Context context, String codename, String model) {
  SharedPreferences prefs = context.getSharedPreferences(SHARED_PREF_NAME, Context.MODE_PRIVATE);
  // Cache key combines codename and model so models sharing a codename don't collide.
  String key = String.format("%s:%s", codename, model);
  String savedJson = prefs.getString(key, null);
  if (savedJson != null) {
    try {
      return new DeviceInfo(new JSONObject(savedJson));
    } catch (JSONException e) {
      // Corrupt/incomplete cache entry; fall through and try the network instead.
      e.printStackTrace();
    }
  }
  // check if we have an internet connection
  int ret = context.checkCallingOrSelfPermission(Manifest.permission.ACCESS_NETWORK_STATE);
  boolean isConnectedToNetwork = false;
  if (ret == PackageManager.PERMISSION_GRANTED) {
    ConnectivityManager connMgr = (ConnectivityManager)
        context.getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
    if (networkInfo != null && networkInfo.isConnected()) {
      isConnectedToNetwork = true;
    }
  } else {
    // assume we are connected.
    isConnectedToNetwork = true;
  }
  if (isConnectedToNetwork) {
    try {
      // Blocking network fetch (downloadJson uses HttpURLConnection synchronously);
      // callers must invoke this off the main thread.
      String url = String.format(DEVICE_JSON_URL, codename);
      String jsonString = downloadJson(url);
      JSONArray jsonArray = new JSONArray(jsonString);
      for (int i = 0, len = jsonArray.length(); i < len; i++) {
        JSONObject json = jsonArray.getJSONObject(i);
        DeviceInfo info = new DeviceInfo(json);
        if (codename.equals(info.codename) && model.equals(info.model)) {
          // Save to SharedPreferences so we don't need to make another request.
          prefs.edit().putString(key, json.toString()).commit();
          return info;
        }
      }
    } catch (Exception e) {
      // Network or parse failure is non-fatal; fall back to Build values below.
      e.printStackTrace();
    }
  }
  if (codename.equals(Build.DEVICE) && model.equals(Build.MODEL)) {
    // current device
    return new DeviceInfo(
        Build.MANUFACTURER,
        getDeviceName(),
        codename,
        model);
  }
  // unknown device
  return new DeviceInfo(
      null,
      null,
      codename,
      model);
}
/**
 * Fluent request for looking up a device's market name. Results are delivered to a
 * {@link Callback} on the main thread; the lookup itself runs on a background thread
 * when {@link #request(Callback)} is called from the main thread.
 */
public static final class Request {

  private final Context context;
  private final Handler handler;
  private String codename;
  private String model;

  private Request(Context ctx) {
    this.context = ctx;
    // Posts results back to the main (UI) thread.
    this.handler = new Handler(ctx.getMainLooper());
    // Default to the current device.
    this.codename = Build.DEVICE;
    this.model = Build.MODEL;
  }

  /**
   * Set the device codename to query. You should also set the model.
   *
   * @param codename
   *     the value of the system property "ro.product.device"
   * @return This Request object to allow for chaining of calls to set methods.
   * @see Build#DEVICE
   */
  public Request setCodename(String codename) {
    this.codename = codename;
    return this;
  }

  /**
   * Set the device model to query. You should also set the codename.
   *
   * @param model
   *     the value of the system property "ro.product.model"
   * @return This Request object to allow for chaining of calls to set methods.
   * @see Build#MODEL
   */
  public Request setModel(String model) {
    this.model = model;
    return this;
  }

  /**
   * Download information about the device. This saves the results in shared-preferences so
   * future requests will not need a network connection.
   *
   * @param callback
   *     the callback to retrieve the {@link DeviceName.DeviceInfo}
   */
  public void request(Callback callback) {
    Runnable job = createLookupTask(callback);
    if (Looper.getMainLooper() == Looper.myLooper()) {
      // On the UI thread: move the (potentially networked) lookup to a worker thread.
      new Thread(job).start();
    } else {
      // Already on a background thread; run inline.
      job.run();
    }
  }

  /** Builds the task that performs the lookup and posts the result to the main thread. */
  private Runnable createLookupTask(final Callback callback) {
    return new Runnable() {
      DeviceInfo deviceInfo;
      Exception error;

      @Override
      public void run() {
        try {
          deviceInfo = getDeviceInfo(context, codename, model);
        } catch (Exception e) {
          error = e;
        }
        handler.post(new Runnable() {
          @Override
          public void run() {
            callback.onFinished(deviceInfo, error);
          }
        });
      }
    };
  }
}
/**
 * Callback which is invoked when the {@link DeviceName.DeviceInfo} is finished loading.
 */
public interface Callback {

  /**
   * Callback to get the device info. This is run on the UI thread.
   *
   * @param info
   *     the requested {@link DeviceName.DeviceInfo}
   * @param error
   *     {@code null} if nothing went wrong; otherwise the exception raised while
   *     loading the device info.
   */
  void onFinished(DeviceInfo info, Exception error);
}
/**
 * Device information based on
 * <a href="https://support.google.com/googleplay/answer/1727131">Google's maintained list</a>.
 */
public static final class DeviceInfo {

  /** Retail branding */
  public final String manufacturer;
  /** Marketing name */
  public final String marketName;
  /** the value of the system property "ro.product.device" */
  public final String codename;
  /** the value of the system property "ro.product.model" */
  public final String model;

  public DeviceInfo(String manufacturer, String marketName, String codename, String model) {
    this.manufacturer = manufacturer;
    this.marketName = marketName;
    this.codename = codename;
    this.model = model;
  }

  /** Builds the info from one entry of the downloaded/cached device-list JSON. */
  private DeviceInfo(JSONObject jsonObject) throws JSONException {
    this(jsonObject.getString("manufacturer"),
        jsonObject.getString("market_name"),
        jsonObject.getString("codename"),
        jsonObject.getString("model"));
  }

  /**
   * @return the consumer friendly name of the device.
   */
  public String getName() {
    // Prefer the marketing name; otherwise fall back to a capitalized model string.
    return TextUtils.isEmpty(marketName) ? capitalize(model) : marketName;
  }
}
}
|
Update JSON url
|
library/src/main/java/com/jaredrummler/android/device/DeviceName.java
|
Update JSON url
|
|
Java
|
apache-2.0
|
375b273ceda746cd1742dffd2daa7681a9d8ebcc
| 0
|
Aweitzel86/TestCase2.1,festusjejelowo/commons-csv,Aweitzel86/TestCase2.1,mbreslow/commons-csv,UzumakiMansi/commons-csv,fadysamirzakarya/commons-csv,dakinyade/commons-csv,muhammadallee/commons-csv,rayiss/commons-csv,festusjejelowo/commons-csv,khalilrahman/commons-csv,amee-trivedi/commons-csv,lihenu/Crossover_project,jmhanna/commons-csv,gargchap/gargvaibhav,quettech/qa2,AndrewGuthua/CrossOverTest2,mohanaraosv/commons-csv,catconst/commons-csv,warriorno22/commons-csv,najamalvi/PRODSUP-002,amee-trivedi/commons-csv,shashankasharma/commons-csv,shadykandeel/commons-csv,quettech/qa2,jtardaguila/test2,fadysamirzakarya/common-csv-2,shubhcollaborator/common-csvnew,sruputway/commons-csv_test,GauriGNaik/commons-csv,Elttbakh/Test02,syedbilalmasaud/case2,chio003/Test2,shashankasharma/commons-csv,shashankasharma/commons-csv,thanhnbt/commons-csv,rayiss/commons-csv,viliescu/PRODSUP-002,UzumakiMansi/commons-csv,COTechTrial/case2,sruputway/commons-csv_test,chronoangelus/commons-csv,sufianqayyum131/PRODSUP-002,chio003/Test2,muhammadallee/commons-csv,pvllnspk/commons-csv,arunpaulonline/test2,fadysamirzakarya/common-csv-2,AndrewGuthua/CrossOverTest2,harikrishna1947a/csv,afafhassan/commons-csv,RavinaDhruve/commons-csv,najamalvi/PRODSUP-002,iffi101/commons-csv,parmarsumit/commons-csv,quettech/csv-import,SCORPIO12/Case2,DGAlexandru/commons-csv,syedbilalmasaud/case2,sufianqayyum131/PRODSUP-002,jmhanna/commons-csv,warriorno22/commons-csv,quettech/csv-import,viliescu/PRODSUP-002,DGAlexandru/commons-csv,dakinyade/commons-csv,arunnairvyaj/commons-csv-trunk,fabriciobressan/crossover_question2,mohanaraosv/commons-csv,RavinaDhruve/commons-csv,COTechTrial/case2,lihenu/Crossover_project,arunnairvyaj/commons-csv-trunk,UzumakiMansi/commons-csv,expertryk/commons-csv,afafhassan/commons-csv,shashankasharma/commons-csv,SCORPIO12/Case2,fadysamirzakarya/commons-csv,catconst/commons-csv,Elttbakh/Test03,Elttbakh/Test03,arunpaulonline/test2,pvllnspk/commons-csv,shacore10/commons-csv,catconst/commons-csv,mirasrael
/commons-csv,parmarsumit/commons-csv,Elttbakh/Test02,mirasrael/commons-csv,iffi101/commons-csv,shadykandeel/commons-csv,fabriciobressan/crossover_question2,mbreslow/commons-csv,shacore10/commons-csv,shubhcollaborator/common-csvnew,gargchap/gargvaibhav,chronoangelus/commons-csv,jtardaguila/test2,expertryk/commons-csv,thanhnbt/commons-csv,harikrishna1947a/csv,GauriGNaik/commons-csv,khalilrahman/commons-csv,apache/commons-csv,apache/commons-csv
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.csv;
import static org.apache.commons.csv.Constants.CR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.junit.Test;
/**
 * Tests for {@link CSVPrinter}: quoting, escaping, comments, null handling, JDBC
 * result-set printing, and randomized printer/parser round-trips.
 *
 * @version $Id$
 */
public class CSVPrinterTest {

    private final String recordSeparator = CSVFormat.DEFAULT.getRecordSeparator();

    /** Renders control and non-ASCII characters as "(code)" so failure messages are readable. */
    private static String printable(final String s) {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < s.length(); i++) {
            final char ch = s.charAt(i);
            if (ch <= ' ' || ch >= 128) {
                sb.append("(").append((int) ch).append(")");
            } else {
                sb.append(ch);
            }
        }
        return sb.toString();
    }

    /**
     * Generates a small random table, prints it with the given format, parses the
     * output back, and asserts the parsed records equal the original table.
     */
    private void doOneRandom(final CSVFormat format) throws Exception {
        final Random r = new Random();
        final int nLines = r.nextInt(4) + 1;
        final int nCol = r.nextInt(3) + 1;
        // nLines=1;nCol=2;
        final String[][] lines = new String[nLines][];
        for (int i = 0; i < nLines; i++) {
            final String[] line = new String[nCol];
            lines[i] = line;
            for (int j = 0; j < nCol; j++) {
                line[j] = randStr();
            }
        }
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, format);
        for (int i = 0; i < nLines; i++) {
            // for (int j=0; j<lines[i].length; j++) System.out.println("### VALUE=:" + printable(lines[i][j]));
            printer.printRecord((Object[])lines[i]);
        }
        printer.flush();
        printer.close();
        final String result = sw.toString();
        // System.out.println("### :" + printable(result));
        final CSVParser parser = CSVParser.parse(result, format);
        final List<CSVRecord> parseResult = parser.getRecords();
        Utils.compare("Printer output :" + printable(result), lines, parseResult);
    }

    /** Repeats the random round-trip {@code iter} times for the given format. */
    private void doRandom(final CSVFormat format, final int iter) throws Exception {
        for (int i = 0; i < iter; i++) {
            doOneRandom(format);
        }
    }

    /**
     * Produces a random string of length 0-19 in which CSV-significant characters
     * (CR, LF, tab, comma, quotes, backslash, ...) appear with elevated frequency.
     */
    private String randStr() {
        final Random r = new Random();
        final int sz = r.nextInt(20);
        // sz = r.nextInt(3);
        final char[] buf = new char[sz];
        for (int i = 0; i < sz; i++) {
            // stick in special chars with greater frequency
            char ch;
            final int what = r.nextInt(20);
            switch (what) {
                case 0:
                    ch = '\r';
                    break;
                case 1:
                    ch = '\n';
                    break;
                case 2:
                    ch = '\t';
                    break;
                case 3:
                    ch = '\f';
                    break;
                case 4:
                    ch = ' ';
                    break;
                case 5:
                    ch = ',';
                    break;
                case 6:
                    ch = '"';
                    break;
                case 7:
                    ch = '\'';
                    break;
                case 8:
                    ch = '\\';
                    break;
                default:
                    ch = (char) r.nextInt(300);
                    break;
                // default: ch = 'a'; break;
            }
            buf[i] = ch;
        }
        return new String(buf);
    }

    /** Without a comment-start character configured, printComment must output nothing. */
    @Test
    public void testDisabledComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printComment("This is a comment");
        assertEquals("", sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllArrayOfArrays() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecords(new String[][] { { "r1c1", "r1c2" }, { "r2c1", "r2c2" } });
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllArrayOfLists() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        // Raw List[] is deliberate: generic arrays cannot be created in Java.
        printer.printRecords(new List[] { Arrays.asList("r1c1", "r1c2"), Arrays.asList("r2c1", "r2c2") });
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllIterableOfArrays() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecords(Arrays.asList(new String[][] { { "r1c1", "r1c2" }, { "r2c1", "r2c2" } }));
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllIterableOfLists() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        // Raw List[] is deliberate: generic arrays cannot be created in Java.
        printer.printRecords(Arrays.asList(new List[] { Arrays.asList("r1c1", "r1c2"),
                Arrays.asList("r2c1", "r2c2") }));
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrinter1() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecord("a", "b");
        assertEquals("a,b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrinter2() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecord("a,b", "b");
        assertEquals("\"a,b\",b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Prints a JDBC ResultSet from an in-memory H2 database. */
    @Test
    public void testJdbcPrinter() throws IOException, ClassNotFoundException, SQLException {
        final StringWriter sw = new StringWriter();
        Class.forName("org.h2.Driver");
        final Connection connection = DriverManager.getConnection("jdbc:h2:mem:my_test;", "sa", "");
        try {
            final Statement stmt = connection.createStatement();
            stmt.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            stmt.execute("insert into TEST values(1, 'r1')");
            stmt.execute("insert into TEST values(2, 'r2')");
            final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
            printer.printRecords(stmt.executeQuery("select ID, NAME from TEST"));
            assertEquals("1,r1" + recordSeparator + "2,r2" + recordSeparator, sw.toString());
            printer.close();
        } finally {
            connection.close();
        }
    }

    /** A comment containing a newline must be emitted as multiple comment lines. */
    @Test
    public void testMultiLineComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withCommentStart('#'));
        printer.printComment("This is a comment\non multiple lines");
        assertEquals("# This is a comment" + recordSeparator + "# on multiple lines" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter1() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b");
        assertEquals("a,b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter2() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a,b", "b");
        assertEquals("\"a,b\",b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter3() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a, b", "b ");
        assertEquals("\"a, b\",\"b \"" + recordSeparator, sw.toString());
        printer.close();
    }

    /** An embedded quote char must be doubled inside a quoted value. */
    @Test
    public void testPrinter4() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\"c");
        assertEquals("a,\"b\"\"c\"" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter5() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\nc");
        assertEquals("a,\"b\nc\"" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter6() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\r\nc");
        assertEquals("a,\"b\r\nc\"" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter7() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\\c");
        assertEquals("a,b\\c" + recordSeparator, sw.toString());
        printer.close();
    }

    /** By default null values are printed as empty fields. */
    @Test
    public void testPrintNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", null, "b");
        assertEquals("a,,b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** A configured null string replaces null values on output. */
    @Test
    public void testPrintCustomNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withNullString("NULL"));
        printer.printRecord("a", null, "b");
        assertEquals("a,NULL,b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Round trip: the configured null string parses back to a null field value. */
    @Test
    public void testParseCustomNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVFormat format = CSVFormat.DEFAULT.withNullString("NULL");
        final CSVPrinter printer = new CSVPrinter(sw, format);
        printer.printRecord("a", null, "b");
        printer.close();
        final String csvString = sw.toString();
        assertEquals("a,NULL,b" + recordSeparator, csvString);
        final Iterable<CSVRecord> iterable = format.parse(new StringReader(csvString));
        final Iterator<CSVRecord> iterator = iterable.iterator();
        final CSVRecord record = iterator.next();
        assertEquals("a", record.get(0));
        assertEquals(null, record.get(1));
        assertEquals("b", record.get(2));
        assertFalse(iterator.hasNext());
    }

    /** Quote.ALL quotes every value, even those that need no quoting. */
    @Test
    public void testQuoteAll() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuotePolicy(Quote.ALL));
        printer.printRecord("a", "b\nc", "d");
        assertEquals("\"a\",\"b\nc\",\"d\"" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Quote.NON_NUMERIC leaves numbers unquoted. */
    @Test
    public void testQuoteNonNumeric() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuotePolicy(Quote.NON_NUMERIC));
        printer.printRecord("a", "b\nc", Integer.valueOf(1));
        assertEquals("\"a\",\"b\nc\",1" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Randomized round-trip test for the three built-in formats. */
    @Test
    public void testRandom() throws Exception {
        final int iter = 10000;
        doRandom(CSVFormat.DEFAULT, iter);
        doRandom(CSVFormat.EXCEL, iter);
        doRandom(CSVFormat.MYSQL, iter);
    }

    @Test
    public void testPlainQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("abc");
        assertEquals("abc", sw.toString());
        printer.close();
    }

    @Test
    public void testSingleLineComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withCommentStart('#'));
        printer.printComment("This is a comment");
        assertEquals("# This is a comment" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testSingleQuoteQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a'b'c");
        printer.print("xyz");
        assertEquals("'a''b''c',xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testDelimeterQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("'a,b,c',xyz", sw.toString());
        printer.close();
    }

    /** With Quote.NONE the delimiter must be escaped instead of quoted. */
    @Test
    public void testDelimeterQuoteNONE() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVFormat format = CSVFormat.DEFAULT.withEscape('!').withQuotePolicy(Quote.NONE);
        final CSVPrinter printer = new CSVPrinter(sw, format);
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a!,b!,c,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testEOLQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a\rb\nc");
        printer.print("x\by\fz");
        assertEquals("'a\rb\nc',x\by\fz", sw.toString());
        printer.close();
    }

    @Test
    public void testPlainEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null).withEscape('!'));
        printer.print("abc");
        printer.print("xyz");
        assertEquals("abc,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testDelimiterEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withEscape('!').withQuoteChar(null));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a!,b!,c,xyz", sw.toString());
        printer.close();
    }

    /** With quoting disabled, CR and LF are escaped with the escape character. */
    @Test
    public void testEOLEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null).withEscape('!'));
        printer.print("a\rb\nc");
        printer.print("x\fy\bz");
        assertEquals("a!rb!nc,x\fy\bz", sw.toString());
        printer.close();
    }

    @Test
    public void testPlainPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("abc");
        printer.print("xyz");
        assertEquals("abc,xyz", sw.toString());
        printer.close();
    }

    /** With neither quote nor escape configured, the delimiter passes through unchanged. */
    @Test
    public void testDelimiterPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a,b,c,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testEOLPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("a\rb\nc");
        printer.print("x\fy\bz");
        assertEquals("a\rb\nc,x\fy\bz", sw.toString());
        printer.close();
    }

    /** CR is not a legal delimiter; the format constructor must reject it. */
    @Test(expected = IllegalArgumentException.class)
    public void testInvalidFormat() throws Exception {
        final CSVFormat invalidFormat = CSVFormat.DEFAULT.withDelimiter(CR);
        new CSVPrinter(null, invalidFormat).close();
    }
}
|
src/test/java/org/apache/commons/csv/CSVPrinterTest.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.csv;
import static org.apache.commons.csv.Constants.CR;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import org.junit.Test;
/**
*
*
* @version $Id$
*/
/**
 * Tests for {@link CSVPrinter}: quoting, escaping, comments, null handling,
 * JDBC ResultSet printing, and randomized print/parse round trips.
 *
 * @version $Id$
 */
public class CSVPrinterTest {

    /** Record separator of the default format, used to build expected output strings. */
    private final String recordSeparator = CSVFormat.DEFAULT.getRecordSeparator();

    /**
     * Renders {@code s} with control and non-ASCII characters replaced by their
     * numeric code points in parentheses, so failure messages stay readable.
     */
    private static String printable(final String s) {
        final StringBuilder sb = new StringBuilder();
        for (int i = 0; i < s.length(); i++) {
            final char ch = s.charAt(i);
            if (ch <= ' ' || ch >= 128) {
                sb.append("(").append((int) ch).append(")");
            } else {
                sb.append(ch);
            }
        }
        return sb.toString();
    }

    /**
     * Prints a small random matrix of random strings with the given format,
     * then parses the printed text back and verifies the round trip.
     */
    private void doOneRandom(final CSVFormat format) throws Exception {
        final Random r = new Random();
        final int nLines = r.nextInt(4) + 1;
        final int nCol = r.nextInt(3) + 1;
        // nLines=1;nCol=2;
        final String[][] lines = new String[nLines][];
        for (int i = 0; i < nLines; i++) {
            final String[] line = new String[nCol];
            lines[i] = line;
            for (int j = 0; j < nCol; j++) {
                line[j] = randStr();
            }
        }
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, format);
        for (int i = 0; i < nLines; i++) {
            // for (int j=0; j<lines[i].length; j++) System.out.println("### VALUE=:" + printable(lines[i][j]));
            printer.printRecord((Object[]) lines[i]);
        }
        printer.flush();
        printer.close();
        final String result = sw.toString();
        // System.out.println("### :" + printable(result));
        final CSVParser parser = CSVParser.parse(result, format);
        final List<CSVRecord> parseResult = parser.getRecords();
        Utils.compare("Printer output :" + printable(result), lines, parseResult);
    }

    /** Repeats the print/parse round-trip check {@code iter} times. */
    private void doRandom(final CSVFormat format, final int iter) throws Exception {
        for (int i = 0; i < iter; i++) {
            doOneRandom(format);
        }
    }

    /**
     * Returns a random string of length 0-19, biased towards the characters
     * that are special to CSV (separators, quotes, escapes, EOLs) in order to
     * stress the printer's quoting and escaping.
     */
    private String randStr() {
        final Random r = new Random();
        final int sz = r.nextInt(20);
        // sz = r.nextInt(3);
        final char[] buf = new char[sz];
        for (int i = 0; i < sz; i++) {
            // stick in special chars with greater frequency
            char ch;
            final int what = r.nextInt(20);
            switch (what) {
            case 0:
                ch = '\r';
                break;
            case 1:
                ch = '\n';
                break;
            case 2:
                ch = '\t';
                break;
            case 3:
                ch = '\f';
                break;
            case 4:
                ch = ' ';
                break;
            case 5:
                ch = ',';
                break;
            case 6:
                ch = '"';
                break;
            case 7:
                ch = '\'';
                break;
            case 8:
                ch = '\\';
                break;
            default:
                ch = (char) r.nextInt(300);
                break;
            // default: ch = 'a'; break;
            }
            buf[i] = ch;
        }
        return new String(buf);
    }

    /** Comments must be suppressed when the format has no comment-start character. */
    @Test
    public void testDisabledComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printComment("This is a comment");
        assertEquals("", sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllArrayOfArrays() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecords(new String[][] { { "r1c1", "r1c2" }, { "r2c1", "r2c2" } });
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllArrayOfLists() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        // Arrays.asList is a varargs method; no need to create a String[] explicitly.
        printer.printRecords(new List[] { Arrays.asList("r1c1", "r1c2"), Arrays.asList("r2c1", "r2c2") });
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllIterableOfArrays() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecords(Arrays.asList(new String[][] { { "r1c1", "r1c2" }, { "r2c1", "r2c2" } }));
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrintAllIterableOfLists() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        // Arrays.asList is a varargs method; no need to create intermediate arrays.
        printer.printRecords(Arrays.asList(Arrays.asList("r1c1", "r1c2"),
                Arrays.asList("r2c1", "r2c2")));
        assertEquals("r1c1,r1c2" + recordSeparator + "r2c1,r2c2" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrinter1() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecord("a", "b");
        assertEquals("a,b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testExcelPrinter2() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.EXCEL);
        printer.printRecord("a,b", "b");
        assertEquals("\"a,b\",b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Prints the rows of a JDBC ResultSet (in-memory H2 database). */
    @Test
    public void testJdbcPrinter() throws IOException, ClassNotFoundException, SQLException {
        final StringWriter sw = new StringWriter();
        Class.forName("org.h2.Driver");
        final Connection connection = DriverManager.getConnection("jdbc:h2:mem:my_test;", "sa", "");
        try {
            final Statement stmt = connection.createStatement();
            stmt.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR(255))");
            stmt.execute("insert into TEST values(1, 'r1')");
            stmt.execute("insert into TEST values(2, 'r2')");
            final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
            printer.printRecords(stmt.executeQuery("select ID, NAME from TEST"));
            assertEquals("1,r1" + recordSeparator + "2,r2" + recordSeparator, sw.toString());
            printer.close();
        } finally {
            // Closing the connection also releases the statement and result set.
            connection.close();
        }
    }

    /** Embedded newlines in a comment are split into separate comment lines. */
    @Test
    public void testMultiLineComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withCommentStart('#'));
        printer.printComment("This is a comment\non multiple lines");
        assertEquals("# This is a comment" + recordSeparator + "# on multiple lines" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter1() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b");
        assertEquals("a,b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter2() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a,b", "b");
        assertEquals("\"a,b\",b" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter3() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a, b", "b ");
        assertEquals("\"a, b\",\"b \"" + recordSeparator, sw.toString());
        printer.close();
    }

    /** A double quote in a value is doubled and the value is quoted. */
    @Test
    public void testPrinter4() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\"c");
        assertEquals("a,\"b\"\"c\"" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter5() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\nc");
        assertEquals("a,\"b\nc\"" + recordSeparator, sw.toString());
        printer.close();
    }

    @Test
    public void testPrinter6() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\r\nc");
        assertEquals("a,\"b\r\nc\"" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Backslash is not special in the default format and passes through unescaped. */
    @Test
    public void testPrinter7() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", "b\\c");
        assertEquals("a,b\\c" + recordSeparator, sw.toString());
        printer.close();
    }

    /** A null value prints as an empty field by default. */
    @Test
    public void testPrintNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT);
        printer.printRecord("a", null, "b");
        assertEquals("a,,b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** A null value prints as the configured null string when one is set. */
    @Test
    public void testPrintCustomNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withNullString("NULL"));
        printer.printRecord("a", null, "b");
        assertEquals("a,NULL,b" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Round trip: the custom null string is converted back to null on parse. */
    @Test
    public void testParseCustomNullValues() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVFormat format = CSVFormat.DEFAULT.withNullString("NULL");
        final CSVPrinter printer = new CSVPrinter(sw, format);
        printer.printRecord("a", null, "b");
        printer.close();
        final String csvString = sw.toString();
        assertEquals("a,NULL,b" + recordSeparator, csvString);
        final Iterable<CSVRecord> iterable = format.parse(new StringReader(csvString));
        final Iterator<CSVRecord> iterator = iterable.iterator();
        final CSVRecord record = iterator.next();
        assertEquals("a", record.get(0));
        assertEquals(null, record.get(1));
        assertEquals("b", record.get(2));
        assertFalse(iterator.hasNext());
    }

    /** Quote.ALL quotes every value, numeric or not. */
    @Test
    public void testQuoteAll() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuotePolicy(Quote.ALL));
        printer.printRecord("a", "b\nc", "d");
        assertEquals("\"a\",\"b\nc\",\"d\"" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Quote.NON_NUMERIC leaves Number values unquoted. */
    @Test
    public void testQuoteNonNumeric() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuotePolicy(Quote.NON_NUMERIC));
        printer.printRecord("a", "b\nc", Integer.valueOf(1));
        assertEquals("\"a\",\"b\nc\",1" + recordSeparator, sw.toString());
        printer.close();
    }

    /** Randomized round-trip stress test over the built-in formats. */
    @Test
    public void testRandom() throws Exception {
        final int iter = 10000;
        doRandom(CSVFormat.DEFAULT, iter);
        doRandom(CSVFormat.EXCEL, iter);
        doRandom(CSVFormat.MYSQL, iter);
    }

    @Test
    public void testPlainQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("abc");
        assertEquals("abc", sw.toString());
        printer.close();
    }

    @Test
    public void testSingleLineComment() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withCommentStart('#'));
        printer.printComment("This is a comment");
        assertEquals("# This is a comment" + recordSeparator, sw.toString());
        printer.close();
    }

    /** The quote char inside a value is doubled and the value is quoted. */
    @Test
    public void testSingleQuoteQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a'b'c");
        printer.print("xyz");
        assertEquals("'a''b''c',xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testDelimeterQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("'a,b,c',xyz", sw.toString());
        printer.close();
    }

    /** With Quote.NONE, delimiters inside values are escaped instead of quoted. */
    @Test
    public void testDelimeterQuoteNONE() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVFormat format = CSVFormat.DEFAULT.withEscape('!').withQuotePolicy(Quote.NONE);
        final CSVPrinter printer = new CSVPrinter(sw, format);
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a!,b!,c,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testEOLQuoted() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar('\''));
        printer.print("a\rb\nc");
        printer.print("x\by\fz");
        assertEquals("'a\rb\nc',x\by\fz", sw.toString());
        printer.close();
    }

    @Test
    public void testPlainEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null).withEscape('!'));
        printer.print("abc");
        printer.print("xyz");
        assertEquals("abc,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testDelimiterEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withEscape('!').withQuoteChar(null));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a!,b!,c,xyz", sw.toString());
        printer.close();
    }

    /** With quoting disabled, EOL chars in values are escaped (\r -> !r, \n -> !n). */
    @Test
    public void testEOLEscaped() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null).withEscape('!'));
        printer.print("a\rb\nc");
        printer.print("x\fy\bz");
        assertEquals("a!rb!nc,x\fy\bz", sw.toString());
        printer.close();
    }

    @Test
    public void testPlainPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("abc");
        printer.print("xyz");
        assertEquals("abc,xyz", sw.toString());
        printer.close();
    }

    /** With no quote char and no escape, special characters pass through verbatim. */
    @Test
    public void testDelimiterPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("a,b,c");
        printer.print("xyz");
        assertEquals("a,b,c,xyz", sw.toString());
        printer.close();
    }

    @Test
    public void testEOLPlain() throws IOException {
        final StringWriter sw = new StringWriter();
        final CSVPrinter printer = new CSVPrinter(sw, CSVFormat.DEFAULT.withQuoteChar(null));
        printer.print("a\rb\nc");
        printer.print("x\fy\bz");
        assertEquals("a\rb\nc,x\fy\bz", sw.toString());
        printer.close();
    }

    /** CR is not a legal delimiter; the CSVPrinter constructor must reject it. */
    @Test(expected = IllegalArgumentException.class)
    public void testInvalidFormat() throws Exception {
        final CSVFormat invalidFormat = CSVFormat.DEFAULT.withDelimiter(CR);
        new CSVPrinter(null, invalidFormat).close();
    }
}
|
No need for explicitly creating an array. Arrays.asList is a varargs method.
git-svn-id: 77bd0fb3f0b1af3312cb764eaf99792cfc1ce6c4@1512621 13f79535-47bb-0310-9956-ffa450edef68
|
src/test/java/org/apache/commons/csv/CSVPrinterTest.java
|
No need for explicitly creating an array. Arrays.asList is a varargs method.
|
|
Java
|
apache-2.0
|
32f8a9bf072b7b2d464ab12b7f481e8382d5eec7
| 0
|
zhcet-amu/zhcet-web,zhcet-amu/zhcet-web,zhcet-amu/zhcet-web,zhcet-amu/zhcet-web,zhcet-amu/zhcet-web
|
package amu.zhcet.security;
import amu.zhcet.auth.login.persistent.PersistentTokenService;
import amu.zhcet.data.user.Role;
import amu.zhcet.firebase.auth.FirebaseAuthenticationProvider;
import amu.zhcet.firebase.auth.FirebaseAutheticationFilter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.autoconfigure.security.servlet.EndpointRequest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationDetailsSource;
import org.springframework.security.authentication.AuthenticationEventPublisher;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.dao.DaoAuthenticationProvider;
import org.springframework.security.config.BeanIds;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.session.SessionRegistry;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.authentication.WebAuthenticationDetails;
import javax.servlet.http.HttpServletRequest;
@Slf4j
@Configuration
@EnableWebSecurity
/**
 * Central Spring Security configuration: wires the authentication providers
 * (Firebase token + DAO username/password), the remember-me persistent token
 * store, session management, and the per-URL authorization rules.
 *
 * NOTE: the antMatcher chain below is order-sensitive — the first matching
 * rule wins, so more specific patterns (e.g. "/dashboard/student/**") are
 * deliberately listed before more general ones (e.g. "/dashboard/**").
 */
class SecurityConfiguration extends WebSecurityConfigurerAdapter {
    private final PersistentTokenService persistentTokenService;
    private final AuthenticationDetailsSource<HttpServletRequest, WebAuthenticationDetails> authenticationDetailsSource;
    private final SessionRegistry sessionRegistry;
    private final AuthenticationFailureHandler authenticationFailureHandler;
    private final FirebaseAutheticationFilter firebaseAutheticationFilter;

    /** Constructor injection of all collaborators; no optional dependencies. */
    @Autowired
    public SecurityConfiguration(
            PersistentTokenService persistentTokenService,
            AuthenticationDetailsSource<HttpServletRequest, WebAuthenticationDetails> authenticationDetailsSource,
            SessionRegistry sessionRegistry,
            AuthenticationFailureHandler authenticationFailureHandler,
            FirebaseAutheticationFilter firebaseAutheticationFilter) {
        this.persistentTokenService = persistentTokenService;
        this.authenticationDetailsSource = authenticationDetailsSource;
        this.sessionRegistry = sessionRegistry;
        this.authenticationFailureHandler = authenticationFailureHandler;
        this.firebaseAutheticationFilter = firebaseAutheticationFilter;
    }

    /** Registers the Firebase token provider with the shared AuthenticationManager. */
    @Autowired
    public void configureFirebaseAuthentication(AuthenticationManagerBuilder authBuilder, FirebaseAuthenticationProvider firebaseAuthenticationProvider) {
        authBuilder.authenticationProvider(firebaseAuthenticationProvider);
    }

    /** Registers the DAO (username/password) provider with the shared AuthenticationManager. */
    @Autowired
    public void configureCustomAuthentication(AuthenticationManagerBuilder authBuilder, DaoAuthenticationProvider daoAuthenticationProvider) {
        authBuilder.authenticationProvider(daoAuthenticationProvider);
    }

    /** Publishes authentication success/failure events (e.g. for login auditing). */
    @Autowired
    public void configureEventPublisher(AuthenticationManagerBuilder authBuilder, AuthenticationEventPublisher authenticationEventPublisher) {
        authBuilder.authenticationEventPublisher(authenticationEventPublisher);
    }

    /** Exposes the AuthenticationManager as a bean so other components can inject it. */
    @Override
    @Bean(name = BeanIds.AUTHENTICATION_MANAGER)
    public AuthenticationManager authenticationManagerBean() throws Exception {
        return super.authenticationManagerBean();
    }

    @Override
    protected void configure(HttpSecurity httpSecurity) throws Exception {
        httpSecurity
                // Firebase token auth runs before the standard form-login filter
                .addFilterBefore(firebaseAutheticationFilter, UsernamePasswordAuthenticationFilter.class)
                .authorizeRequests()
                // Actuator endpoints restricted to development admins
                .requestMatchers(EndpointRequest.toAnyEndpoint())
                    .hasRole(Role.DEVELOPMENT_ADMIN.name())
                .antMatchers("/profile/**").authenticated()
                // specific student dashboard rule MUST precede the general /dashboard/** rule
                .antMatchers("/dashboard/student/**")
                    .hasAuthority(Role.STUDENT.toString())
                .antMatchers("/dashboard/**").authenticated()
                // per-notification access decided by the permissionManager bean (SpEL)
                .antMatchers("/notifications/{id}/**")
                    .access("@permissionManager.checkNotificationRecipient(authentication, #id)")
                .antMatchers("/notifications/**")
                    .authenticated()
                .antMatchers("/management/notifications/{id}/**")
                    .access("@permissionManager.checkNotificationCreator(authentication, #id)")
                .antMatchers("/management/**")
                    .hasAuthority(Role.TEACHING_STAFF.toString())
                .antMatchers("/admin/dean/**")
                    .hasAuthority(Role.DEAN_ADMIN.toString())
                // course-scoped admin pages checked against the authenticated user's courses
                .antMatchers("/admin/department/courses/{course}/**",
                        "/admin/department/floated/{course}/**",
                        "/admin/department/float/{course}/**")
                    .access("@permissionManager.checkCourse(authentication, #course)")
                .antMatchers("/admin/department/{department}/**")
                    .access("@permissionManager.checkDepartment(authentication, #department)")
                .antMatchers("/admin/department/**")
                    .hasAuthority(Role.DEPARTMENT_ADMIN.toString())
                .antMatchers("/admin/faculty/**")
                    .hasAuthority(Role.FACULTY.toString())
                .antMatchers("/").permitAll()
                .and()
            .formLogin()
                .loginPage("/login").permitAll()
                .authenticationDetailsSource(authenticationDetailsSource)
                .failureHandler(authenticationFailureHandler)
                .and()
            .logout().permitAll()
                .logoutSuccessUrl("/login?logout")
                .and()
            .rememberMe()
                .rememberMeCookieName("zhcet-remember-me")
                // 24 hours
                .tokenValiditySeconds(24 * 60 * 60)
                .tokenRepository(persistentTokenService)
                .and()
            .sessionManagement()
                // one concurrent session per user; oldest session is expired
                .maximumSessions(1)
                .sessionRegistry(sessionRegistry)
                .expiredUrl("/login?expired");
    }
}
|
src/main/java/amu/zhcet/security/SecurityConfiguration.java
|
package amu.zhcet.security;
import amu.zhcet.auth.login.persistent.PersistentTokenService;
import amu.zhcet.data.user.Role;
import amu.zhcet.firebase.auth.FirebaseAuthenticationProvider;
import amu.zhcet.firebase.auth.FirebaseAutheticationFilter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.autoconfigure.security.servlet.EndpointRequest;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationDetailsSource;
import org.springframework.security.authentication.AuthenticationEventPublisher;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.dao.DaoAuthenticationProvider;
import org.springframework.security.config.BeanIds;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.session.SessionRegistry;
import org.springframework.security.web.authentication.AuthenticationFailureHandler;
import org.springframework.security.web.authentication.UsernamePasswordAuthenticationFilter;
import org.springframework.security.web.authentication.WebAuthenticationDetails;
import javax.servlet.http.HttpServletRequest;
@Slf4j
@Configuration
@EnableWebSecurity
/**
 * Central Spring Security configuration: wires the authentication providers
 * (Firebase token + DAO username/password), the remember-me persistent token
 * store, session management, and the per-URL authorization rules.
 *
 * The antMatcher chain is order-sensitive: the first matching rule wins.
 * FIX: "/dashboard/student/**" must be declared BEFORE "/dashboard/**";
 * previously the general authenticated() rule matched first, so unverified
 * (non-STUDENT) users could reach the student dashboard.
 */
class SecurityConfiguration extends WebSecurityConfigurerAdapter {
    private final PersistentTokenService persistentTokenService;
    private final AuthenticationDetailsSource<HttpServletRequest, WebAuthenticationDetails> authenticationDetailsSource;
    private final SessionRegistry sessionRegistry;
    private final AuthenticationFailureHandler authenticationFailureHandler;
    private final FirebaseAutheticationFilter firebaseAutheticationFilter;

    /** Constructor injection of all collaborators; no optional dependencies. */
    @Autowired
    public SecurityConfiguration(
            PersistentTokenService persistentTokenService,
            AuthenticationDetailsSource<HttpServletRequest, WebAuthenticationDetails> authenticationDetailsSource,
            SessionRegistry sessionRegistry,
            AuthenticationFailureHandler authenticationFailureHandler,
            FirebaseAutheticationFilter firebaseAutheticationFilter) {
        this.persistentTokenService = persistentTokenService;
        this.authenticationDetailsSource = authenticationDetailsSource;
        this.sessionRegistry = sessionRegistry;
        this.authenticationFailureHandler = authenticationFailureHandler;
        this.firebaseAutheticationFilter = firebaseAutheticationFilter;
    }

    /** Registers the Firebase token provider with the shared AuthenticationManager. */
    @Autowired
    public void configureFirebaseAuthentication(AuthenticationManagerBuilder authBuilder, FirebaseAuthenticationProvider firebaseAuthenticationProvider) {
        authBuilder.authenticationProvider(firebaseAuthenticationProvider);
    }

    /** Registers the DAO (username/password) provider with the shared AuthenticationManager. */
    @Autowired
    public void configureCustomAuthentication(AuthenticationManagerBuilder authBuilder, DaoAuthenticationProvider daoAuthenticationProvider) {
        authBuilder.authenticationProvider(daoAuthenticationProvider);
    }

    /** Publishes authentication success/failure events (e.g. for login auditing). */
    @Autowired
    public void configureEventPublisher(AuthenticationManagerBuilder authBuilder, AuthenticationEventPublisher authenticationEventPublisher) {
        authBuilder.authenticationEventPublisher(authenticationEventPublisher);
    }

    /** Exposes the AuthenticationManager as a bean so other components can inject it. */
    @Override
    @Bean(name = BeanIds.AUTHENTICATION_MANAGER)
    public AuthenticationManager authenticationManagerBean() throws Exception {
        return super.authenticationManagerBean();
    }

    @Override
    protected void configure(HttpSecurity httpSecurity) throws Exception {
        httpSecurity
                // Firebase token auth runs before the standard form-login filter
                .addFilterBefore(firebaseAutheticationFilter, UsernamePasswordAuthenticationFilter.class)
                .authorizeRequests()
                .requestMatchers(EndpointRequest.toAnyEndpoint())
                    .hasRole(Role.DEVELOPMENT_ADMIN.name())
                .antMatchers("/").permitAll()
                .antMatchers("/profile/**").authenticated()
                // BUGFIX (#198): specific student rule must precede the general
                // /dashboard/** rule, otherwise any authenticated user matched
                // authenticated() first and the STUDENT authority check never ran.
                .antMatchers("/dashboard/student/**")
                    .hasAuthority(Role.STUDENT.toString())
                .antMatchers("/dashboard/**").authenticated()
                // per-notification access decided by the permissionManager bean (SpEL)
                .antMatchers("/notifications/{id}/**")
                    .access("@permissionManager.checkNotificationRecipient(authentication, #id)")
                .antMatchers("/notifications/**")
                    .authenticated()
                .antMatchers("/management/notifications/{id}/**")
                    .access("@permissionManager.checkNotificationCreator(authentication, #id)")
                .antMatchers("/management/**")
                    .hasAuthority(Role.TEACHING_STAFF.toString())
                .antMatchers("/admin/dean/**")
                    .hasAuthority(Role.DEAN_ADMIN.toString())
                .antMatchers("/admin/department/courses/{course}/**",
                        "/admin/department/floated/{course}/**",
                        "/admin/department/float/{course}/**")
                    .access("@permissionManager.checkCourse(authentication, #course)")
                .antMatchers("/admin/department/{department}/**")
                    .access("@permissionManager.checkDepartment(authentication, #department)")
                .antMatchers("/admin/department/**")
                    .hasAuthority(Role.DEPARTMENT_ADMIN.toString())
                .antMatchers("/admin/faculty/**")
                    .hasAuthority(Role.FACULTY.toString())
                .and()
            .formLogin()
                .loginPage("/login").permitAll()
                .authenticationDetailsSource(authenticationDetailsSource)
                .failureHandler(authenticationFailureHandler)
                .and()
            .logout().permitAll()
                .logoutSuccessUrl("/login?logout")
                .and()
            .rememberMe()
                .rememberMeCookieName("zhcet-remember-me")
                // 24 hours
                .tokenValiditySeconds(24 * 60 * 60)
                .tokenRepository(persistentTokenService)
                .and()
            .sessionManagement()
                // one concurrent session per user; oldest session is expired
                .maximumSessions(1)
                .sessionRegistry(sessionRegistry)
                .expiredUrl("/login?expired");
    }
}
|
fix: Unverified student cannot access dashboard (#198)
This bug was caused by a more general URL pattern being listed above a
more specific one: any authenticated user matched the /dashboard/** rule
before the check that /dashboard/student/** requires the STUDENT
authority could run.
src/main/java/amu/zhcet/security/SecurityConfiguration.java
|
fix: Unverified student cannot access dashboard (#198)
|
|
Java
|
apache-2.0
|
67be779d0d39c43d62999f0c756cdc828e0f2b14
| 0
|
bsideup/groovy-core,mariogarcia/groovy-core,ChanJLee/incubator-groovy,mariogarcia/groovy-core,adjohnson916/incubator-groovy,adjohnson916/incubator-groovy,kenzanmedia/incubator-groovy,ebourg/groovy-core,upadhyayap/incubator-groovy,aim-for-better/incubator-groovy,shils/groovy,kenzanmedia/incubator-groovy,apache/incubator-groovy,alien11689/incubator-groovy,rabbitcount/incubator-groovy,nkhuyu/incubator-groovy,jwagenleitner/groovy,adjohnson916/groovy-core,russel/groovy,bsideup/incubator-groovy,sagarsane/incubator-groovy,traneHead/groovy-core,PascalSchumacher/incubator-groovy,dpolivaev/groovy,i55ac/incubator-groovy,aim-for-better/incubator-groovy,jwagenleitner/groovy,upadhyayap/incubator-groovy,shils/incubator-groovy,traneHead/groovy-core,eginez/incubator-groovy,ebourg/groovy-core,jwagenleitner/incubator-groovy,paulk-asert/incubator-groovy,alien11689/groovy-core,christoph-frick/groovy-core,genqiang/incubator-groovy,armsargis/groovy,jwagenleitner/incubator-groovy,shils/incubator-groovy,armsargis/groovy,samanalysis/incubator-groovy,jwagenleitner/groovy,adjohnson916/incubator-groovy,nobeans/incubator-groovy,guangying945/incubator-groovy,sagarsane/groovy-core,sagarsane/groovy-core,paplorinc/incubator-groovy,avafanasiev/groovy,genqiang/incubator-groovy,mariogarcia/groovy-core,paplorinc/incubator-groovy,shils/groovy,aaronzirbes/incubator-groovy,yukangguo/incubator-groovy,PascalSchumacher/incubator-groovy,samanalysis/incubator-groovy,PascalSchumacher/incubator-groovy,russel/groovy,ChanJLee/incubator-groovy,christoph-frick/groovy-core,groovy/groovy-core,yukangguo/incubator-groovy,adjohnson916/groovy-core,avafanasiev/groovy,groovy/groovy-core,groovy/groovy-core,kidaa/incubator-groovy,i55ac/incubator-groovy,rabbitcount/incubator-groovy,apache/incubator-groovy,tkruse/incubator-groovy,bsideup/groovy-core,paulk-asert/groovy,genqiang/incubator-groovy,dpolivaev/groovy,paulk-asert/incubator-groovy,gillius/incubator-groovy,antoaravinth/incubator-groovy,adjohnson916/incubator-groovy,avafan
asiev/groovy,paulk-asert/groovy,graemerocher/incubator-groovy,sagarsane/incubator-groovy,PascalSchumacher/incubator-groovy,nobeans/incubator-groovy,jwagenleitner/incubator-groovy,shils/incubator-groovy,kidaa/incubator-groovy,paplorinc/incubator-groovy,apache/groovy,mariogarcia/groovy-core,russel/groovy,sagarsane/groovy-core,pickypg/incubator-groovy,taoguan/incubator-groovy,i55ac/incubator-groovy,ebourg/groovy-core,ChanJLee/incubator-groovy,paulk-asert/incubator-groovy,christoph-frick/groovy-core,alien11689/groovy-core,guangying945/incubator-groovy,bsideup/incubator-groovy,eginez/incubator-groovy,paulk-asert/groovy,kidaa/incubator-groovy,adjohnson916/groovy-core,fpavageau/groovy,pledbrook/incubator-groovy,nkhuyu/incubator-groovy,aim-for-better/incubator-groovy,dpolivaev/groovy,paulk-asert/incubator-groovy,upadhyayap/incubator-groovy,apache/incubator-groovy,armsargis/groovy,traneHead/groovy-core,ChanJLee/incubator-groovy,rlovtangen/groovy-core,dpolivaev/groovy,jwagenleitner/incubator-groovy,sagarsane/incubator-groovy,aaronzirbes/incubator-groovy,alien11689/groovy-core,bsideup/groovy-core,aaronzirbes/incubator-groovy,EPadronU/incubator-groovy,rlovtangen/groovy-core,yukangguo/incubator-groovy,apache/groovy,nobeans/incubator-groovy,russel/incubator-groovy,EPadronU/incubator-groovy,antoaravinth/incubator-groovy,PascalSchumacher/incubator-groovy,aaronzirbes/incubator-groovy,mariogarcia/groovy-core,ebourg/incubator-groovy,bsideup/incubator-groovy,fpavageau/groovy,gillius/incubator-groovy,fpavageau/groovy,nobeans/incubator-groovy,rabbitcount/incubator-groovy,sagarsane/groovy-core,russel/incubator-groovy,kidaa/incubator-groovy,sagarsane/groovy-core,EPadronU/incubator-groovy,groovy/groovy-core,alien11689/incubator-groovy,taoguan/incubator-groovy,bsideup/groovy-core,rabbitcount/incubator-groovy,aim-for-better/incubator-groovy,apache/groovy,pledbrook/incubator-groovy,guangying945/incubator-groovy,paulk-asert/groovy,adjohnson916/groovy-core,guangying945/incubator-groovy,pickypg/i
ncubator-groovy,pledbrook/incubator-groovy,upadhyayap/incubator-groovy,ebourg/incubator-groovy,traneHead/groovy-core,graemerocher/incubator-groovy,avafanasiev/groovy,samanalysis/incubator-groovy,gillius/incubator-groovy,adjohnson916/groovy-core,ebourg/groovy-core,rlovtangen/groovy-core,EPadronU/incubator-groovy,antoaravinth/incubator-groovy,paulk-asert/incubator-groovy,tkruse/incubator-groovy,russel/incubator-groovy,sagarsane/incubator-groovy,russel/incubator-groovy,fpavageau/groovy,i55ac/incubator-groovy,christoph-frick/groovy-core,christoph-frick/groovy-core,graemerocher/incubator-groovy,alien11689/groovy-core,apache/groovy,apache/incubator-groovy,eginez/incubator-groovy,genqiang/incubator-groovy,antoaravinth/incubator-groovy,gillius/incubator-groovy,eginez/incubator-groovy,rlovtangen/groovy-core,armsargis/groovy,ebourg/incubator-groovy,alien11689/groovy-core,tkruse/incubator-groovy,rlovtangen/groovy-core,ebourg/groovy-core,kenzanmedia/incubator-groovy,taoguan/incubator-groovy,jwagenleitner/groovy,graemerocher/incubator-groovy,alien11689/incubator-groovy,pledbrook/incubator-groovy,groovy/groovy-core,pickypg/incubator-groovy,pickypg/incubator-groovy,ebourg/incubator-groovy,paplorinc/incubator-groovy,shils/groovy,samanalysis/incubator-groovy,shils/incubator-groovy,shils/groovy,tkruse/incubator-groovy,alien11689/incubator-groovy,nkhuyu/incubator-groovy,russel/groovy,kenzanmedia/incubator-groovy,bsideup/incubator-groovy,yukangguo/incubator-groovy,nkhuyu/incubator-groovy,taoguan/incubator-groovy
|
/*
* Copyright 2003-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.vmplugin.v7;
import groovy.lang.GString;
import groovy.lang.GroovyObject;
import groovy.lang.GroovyRuntimeException;
import groovy.lang.GroovySystem;
import groovy.lang.MetaClass;
import groovy.lang.MetaClassImpl;
import groovy.lang.MetaMethod;
import java.lang.invoke.*;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigInteger;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.reflection.CachedMethod;
import org.codehaus.groovy.runtime.NullObject;
import org.codehaus.groovy.runtime.ScriptBytecodeAdapter;
import org.codehaus.groovy.runtime.metaclass.DefaultMetaClassInfo;
import org.codehaus.groovy.runtime.metaclass.NewInstanceMetaMethod;
import org.codehaus.groovy.runtime.metaclass.ReflectionMetaMethod;
import org.codehaus.groovy.runtime.metaclass.DefaultMetaClassInfo.ConstantMetaClassVersioning;
import org.codehaus.groovy.runtime.wrappers.Wrapper;
/**
* Bytecode level interface for bootstrap methods used by invokedynamic.
*
* @author <a href="mailto:blackdrag@gmx.org">Jochen "blackdrag" Theodorou</a>
*/
public class IndyInterface {
/*
* notes:
* MethodHandles#dropArguments:
* invocation with (a,b,c), drop first 2 results in invocation
* with (a) only.
* MethodHandles#insertArguments:
* invocation with (a,b,c), insert (x,y) results in error.
* first need to add with addParameters (X,Y), then bind them with
* insert
*/
private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup();
private static final MethodHandle SELECT_METHOD;
static {
MethodType mt = MethodType.methodType(Object.class, MutableCallSite.class, Class.class, String.class, Boolean.class, Object.class, Object[].class);
try {
SELECT_METHOD = LOOKUP.findStatic(IndyInterface.class, "selectMethod", mt);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
private static final MethodType GENERAL_INVOKER_SIGNATURE = MethodType.methodType(Object.class, Object.class, Object[].class);
private static final MethodType INVOKE_METHOD_SIGNATURE = MethodType.methodType(Object.class, Class.class, Object.class, String.class, Object[].class, boolean.class, boolean.class);
private static final MethodType O2O = MethodType.methodType(Object.class, Object.class);
private static final MethodHandle
UNWRAP_METHOD, TO_STRING, TO_BYTE,
TO_BIGINT, SAME_MC, IS_NULL,
IS_NOT_NULL, UNWRAP_EXCEPTION, SAME_CLASS,
META_METHOD_INVOKER;
static {
try {
UNWRAP_METHOD = LOOKUP.findStatic(IndyInterface.class, "unwrap", O2O);
TO_STRING = LOOKUP.findStatic(IndyInterface.class, "coerceToString", MethodType.methodType(String.class, Object.class));
TO_BYTE = LOOKUP.findStatic(IndyInterface.class, "coerceToByte", O2O);
TO_BIGINT = LOOKUP.findStatic(IndyInterface.class, "coerceToBigInt", O2O);
SAME_MC = LOOKUP.findStatic(IndyInterface.class, "isSameMetaClass", MethodType.methodType(boolean.class, MetaClassImpl.class, Object.class));
IS_NULL = LOOKUP.findStatic(IndyInterface.class, "isNull", MethodType.methodType(boolean.class, Object.class));
IS_NOT_NULL = LOOKUP.findStatic(IndyInterface.class, "isNotNull", MethodType.methodType(boolean.class, Object.class));
UNWRAP_EXCEPTION = LOOKUP.findStatic(IndyInterface.class, "unwrap", MethodType.methodType(Object.class, GroovyRuntimeException.class));
SAME_CLASS = LOOKUP.findStatic(IndyInterface.class, "sameClass", MethodType.methodType(boolean.class, Class.class, Object.class));
META_METHOD_INVOKER = LOOKUP.findVirtual(MetaMethod.class, "invoke", GENERAL_INVOKER_SIGNATURE);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
private static final MethodHandle NULL_REF = MethodHandles.constant(Object.class, null);
private static final MethodHandle VALID_MC_VERSION;
static {
try {
VALID_MC_VERSION = LOOKUP.findVirtual(ConstantMetaClassVersioning.class, "isValid", MethodType.methodType(boolean.class));
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
public static CallSite bootstrap(Lookup caller, String name, MethodType type) {
return realBootstrap(caller, name, type, false);
}
public static CallSite bootstrapSafe(Lookup caller, String name, MethodType type) {
return realBootstrap(caller, name, type, true);
}
private static CallSite realBootstrap(Lookup caller, String name, MethodType type, boolean safe) {
// since indy does not give us the runtime types
// we produce first a dummy call site, which then changes the target to one,
// that does the method selection including the the direct call to the
// real method.
MutableCallSite mc = new MutableCallSite(type);
MethodHandle mh = makeFallBack(mc,caller.lookupClass(),name,type,safe);
mc.setTarget(mh);
return mc;
}
private static MethodHandle makeFallBack(MutableCallSite mc, Class<?> sender, String name, MethodType type, boolean safeNavigation) {
MethodHandle mh = SELECT_METHOD.
bindTo(mc).
bindTo(sender).
bindTo(name).
bindTo(safeNavigation).
asCollector(Object[].class, type.parameterCount()-1).
asType(type);
return mh;
}
private static Class getClass(Object x) {
if (x instanceof Class) return (Class) x;
return x.getClass();
}
private static MetaClass getMetaClass(Object receiver) {
if (receiver == null) {
return NullObject.getNullObject().getMetaClass();
} else if (receiver instanceof GroovyObject) {
return ((GroovyObject) receiver).getMetaClass();
} else {
return GroovySystem.getMetaClassRegistry().getMetaClass(getClass(receiver));
}
}
private static class CallInfo {
public Object[] args;
public MetaMethod method;
public MethodType targetType;
public String methodName;
public MethodHandle handle;
public boolean useMetaClass = false;
public MutableCallSite callSite;
public Class sender;
public boolean isVargs;
public boolean safeNavigation;
}
private static boolean isStatic(Method m) {
int mods = m.getModifiers();
return (mods & Modifier.STATIC) != 0;
}
private static void setHandleForMetaMethod(CallInfo info) {
MetaMethod metaMethod = info.method;
boolean isCategoryTypeMethod = metaMethod instanceof NewInstanceMetaMethod;
if (metaMethod instanceof ReflectionMetaMethod) {
ReflectionMetaMethod rmm = (ReflectionMetaMethod) metaMethod;
metaMethod = rmm.getCachedMethod();
}
if (metaMethod instanceof CachedMethod) {
CachedMethod cm = (CachedMethod) metaMethod;
info.isVargs = cm.isVargsMethod();
try {
Method m = cm.getCachedMethod();
info.handle = LOOKUP.unreflect(m);
if (!isCategoryTypeMethod && isStatic(m)) {
info.handle = MethodHandles.dropArguments(info.handle, 0, Class.class);
}
} catch (IllegalAccessException e) {
throw new GroovyBugError(e);
}
} else if (info.method != null) {
info.handle = META_METHOD_INVOKER;
info.handle = info.handle.bindTo(info.method);
if (info.method.getNativeParameterTypes().length==1 &&
info.args.length==1)
{
// the method expects a parameter but we don't provide an
// argument for that. So we give in a Object[], containing
// a null value
// since MethodHandles.insertArguments is a vargs method giving
// only the array would be like just giving a null value, so
// we need to wrap the array that represents our argument in
// another one for the vargs call
info.handle = MethodHandles.insertArguments(info.handle, 1, new Object[]{new Object[]{null}});
} else {
info.handle = info.handle.asCollector(Object[].class, info.targetType.parameterCount()-2);
}
}
}
private static void chooseMethod(MetaClass mc, CallInfo ci) {
if (!(mc instanceof MetaClassImpl)) {return;}
MetaClassImpl mci = (MetaClassImpl) mc;
Object receiver = ci.args[0];
if (receiver==null) {
receiver = NullObject.getNullObject();
}
if (receiver instanceof Class) {
ci.method = mci.retrieveStaticMethod(ci.methodName, removeRealReceiver(ci.args));
} else {
ci.method = mci.getMethodWithCaching(getClass(receiver), ci.methodName, removeRealReceiver(ci.args), false);
}
}
private static void setMetaClassCallHandleIfNedded(MetaClass mc, CallInfo ci) {
if (ci.handle!=null) return;
try {
ci.useMetaClass = true;
Object receiver = ci.args[0];
if (receiver instanceof Class) {
ci.handle = LOOKUP.findVirtual(mc.getClass(), "invokeStaticMethod", MethodType.methodType(Object.class, Object.class, String.class, Object[].class));
ci.handle = ci.handle.bindTo(mc);
} else {
ci.handle = LOOKUP.findVirtual(mc.getClass(), "invokeMethod", INVOKE_METHOD_SIGNATURE);
ci.handle = ci.handle.bindTo(mc).bindTo(ci.sender);
ci.handle = MethodHandles.insertArguments(ci.handle, ci.handle.type().parameterCount()-2, true, false);
}
ci.handle = MethodHandles.insertArguments(ci.handle, 1, ci.methodName);
ci.handle = ci.handle.asCollector(Object[].class, ci.targetType.parameterCount()-2);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
/**
* called by handle
*/
public static Object unwrap(GroovyRuntimeException gre) throws Throwable {
throw ScriptBytecodeAdapter.unwrap(gre);
}
/**
* called by handle
*/
public static boolean isSameMetaClass(MetaClassImpl mc, Object receiver) {
//TODO: remove this method if possible by switchpoint usage
return receiver instanceof GroovyObject && mc==((GroovyObject)receiver).getMetaClass();
}
/**
* called by handle
*/
public static Object unwrap(Object o) {
Wrapper w = (Wrapper) o;
return w.unwrap();
}
/**
* called by handle
*/
public static String coerceToString(Object o) {
return o.toString();
}
/**
* called by handle
*/
public static Object coerceToByte(Object o) {
return new Byte(((Number) o).byteValue());
}
/**
* called by handle
*/
public static Object coerceToBigInt(Object o) {
return new BigInteger(String.valueOf((Number) o));
}
/**
* check for null - called by handle
*/
public static boolean isNull(Object o) {
return o == null;
}
/**
* check for != null - called by handle
*/
public static boolean isNotNull(Object o) {
return o != null;
}
/**
* called by handle
*/
public static boolean sameClass(Class c, Object o) {
if (o==null) return false;
return o.getClass() == c;
}
private static void correctWrapping(CallInfo ci) {
if (ci.useMetaClass) return;
Class[] pt = ci.handle.type().parameterArray();
for (int i=1; i<ci.args.length; i++) {
if (ci.args[i] instanceof Wrapper) {
Class type = pt[i];
MethodType mt = MethodType.methodType(type, Object.class);
ci.handle = MethodHandles.filterArguments(ci.handle, i, UNWRAP_METHOD.asType(mt));
}
}
}
private static void correctCoerce(CallInfo ci) {
if (ci.useMetaClass) return;
Class[] parameters = ci.handle.type().parameterArray();
if (ci.args.length != parameters.length) {
throw new GroovyBugError("at this point argument array length and parameter array length should be the same");
}
for (int i=1; i<ci.args.length; i++) {
Object arg = ci.args[i];
if (arg==null) continue;
Class got = arg.getClass();
if (arg instanceof GString && parameters[i] == String.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_STRING);
} else if (parameters[i] == Byte.class && got != Byte.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_BYTE);
} else if (parameters[i] == BigInteger.class && got != BigInteger.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_BIGINT);
}
}
}
private static void correctNullReceiver(CallInfo ci){
if (ci.args[0]!=null || ci.useMetaClass) return;
ci.handle = ci.handle.bindTo(NullObject.getNullObject());
ci.handle = MethodHandles.dropArguments(ci.handle, 0, ci.targetType.parameterType(1));
}
private static void dropDummyReceiver(CallInfo ci) {
ci.handle = MethodHandles.dropArguments(ci.handle, 0, Integer.class);
}
private static void setGuards(CallInfo ci, Object receiver) {
if (ci.handle==null) return;
MethodHandle fallback = makeFallBack(ci.callSite, ci.sender, ci.methodName, ci.targetType, ci.safeNavigation);
// special guards for receiver
MethodHandle test=null;
if (receiver instanceof GroovyObject) {
GroovyObject go = (GroovyObject) receiver;
MetaClassImpl mc = (MetaClassImpl) go.getMetaClass();
test = SAME_MC.bindTo(mc);
// drop dummy receiver
test = test.asType(MethodType.methodType(boolean.class,ci.targetType.parameterType(1)));
test = MethodHandles.dropArguments(test, 0, ci.targetType.parameterType(0));
} else if (receiver != null) {
// handle constant meta class
ConstantMetaClassVersioning mcv = DefaultMetaClassInfo.getCurrentConstantMetaClassVersioning();
test = VALID_MC_VERSION.bindTo(mcv);
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
// check for not being null
test = IS_NOT_NULL.asType(MethodType.methodType(boolean.class,ci.targetType.parameterType(1)));
test = MethodHandles.dropArguments(test, 0, ci.targetType.parameterType(0));
}
if (test!=null) {
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
}
// guards for receiver and parameter
Class[] pt = ci.handle.type().parameterArray();
for (int i=0; i<ci.args.length; i++) {
Object arg = ci.args[i];
if (arg==null) {
test = IS_NULL.asType(MethodType.methodType(boolean.class, pt[i+1]));
} else {
Class argClass = arg.getClass();
test = SAME_CLASS.
bindTo(argClass).
asType(MethodType.methodType(boolean.class, pt[i+1]));
}
Class[] drops = new Class[i+1];
for (int j=0; j<drops.length; j++) drops[j] = pt[j];
test = MethodHandles.dropArguments(test, 0, drops);
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
}
}
private static void correctParameterLenth(CallInfo info) {
Class[] params = info.handle.type().parameterArray();
if (info.handle==null) return;
if (!info.isVargs) {
if (params.length != info.args.length) {
//TODO: add null argument
}
return;
}
Class lastParam = params[params.length-1];
Object lastArg = info.args[info.args.length-1];
if (params.length == info.args.length) {
// may need rewrap
if (lastParam == lastArg || lastArg == null) return;
if (lastParam.isInstance(lastArg)) return;
// arg is not null and not assignment compatible
// so we really need to rewrap
info.handle = info.handle.asCollector(lastParam, 1);
} else if (params.length > info.args.length) {
// we depend on the method selection having done a good
// job before already, so the only case for this here is, that
// we have no argument for the array, meaning params.length is
// args.length+1. In that case we have to fill in an empty array
info.handle = MethodHandles.insertArguments(info.handle, params.length-1, Array.newInstance(lastParam.getComponentType(), 0));
} else { //params.length < args.length
// we depend on the method selection having done a good
// job before already, so the only case for this here is, that
// all trailing arguments belong into the vargs array
info.handle = info.handle.asCollector(
lastParam,
info.args.length - params.length + 1);
}
}
private static void addExceptionHandler(CallInfo info) {
if (info.handle==null) return;
MethodType returnType = MethodType.methodType(info.handle.type().returnType(), GroovyRuntimeException.class);
info.handle = MethodHandles.catchException(info.handle, GroovyRuntimeException.class, UNWRAP_EXCEPTION.asType(returnType));
}
private static boolean setNullForSafeNavigation(CallInfo info) {
if (!info.safeNavigation) return false;
info.handle = MethodHandles.dropArguments(NULL_REF,0,info.targetType.parameterArray());
return true;
}
public static Object selectMethod(MutableCallSite callSite, Class sender, String methodName, Boolean safeNavigation, Object dummyReceiver, Object[] arguments) throws Throwable {
//TODO: handle GroovyInterceptable
CallInfo callInfo = new CallInfo();
callInfo.targetType = callSite.type();
callInfo.methodName = methodName;
callInfo.args = arguments;
callInfo.callSite = callSite;
callInfo.sender = sender;
callInfo.safeNavigation = safeNavigation && arguments[0]==null;
if (!setNullForSafeNavigation(callInfo)) {
// setInterceptableHandle(callInfo);
MetaClass mc = getMetaClass(callInfo.args[0]);
chooseMethod(mc, callInfo);
setHandleForMetaMethod(callInfo);
setMetaClassCallHandleIfNedded(mc, callInfo);
correctWrapping(callInfo);
correctParameterLenth(callInfo);
correctCoerce(callInfo);
correctNullReceiver(callInfo);
dropDummyReceiver(callInfo);
try {
callInfo.handle = callInfo.handle.asType(callInfo.targetType);
} catch (Exception e) {
System.err.println("ERROR while processing "+methodName);
throw e;
}
addExceptionHandler(callInfo);
}
setGuards(callInfo, callInfo.args[0]);
callSite.setTarget(callInfo.handle);
return callInfo.handle.invokeWithArguments(repack(dummyReceiver,callInfo.args));
}
private static Object[] repack(Object o, Object[] args) {
Object[] ar = new Object[args.length+1];
ar[0] = o;
for (int i=0; i<args.length; i++) {
ar[i+1] = args[i];
}
return ar;
}
private static Object[] removeRealReceiver(Object[] args) {
Object[] ar = new Object[args.length-1];
for (int i=1; i<args.length; i++) {
ar[i-1] = args[i];
}
return ar;
}
}
|
src/main/org/codehaus/groovy/vmplugin/v7/IndyInterface.java
|
/*
* Copyright 2003-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.vmplugin.v7;
import groovy.lang.GString;
import groovy.lang.GroovyObject;
import groovy.lang.GroovyRuntimeException;
import groovy.lang.GroovySystem;
import groovy.lang.MetaClass;
import groovy.lang.MetaClassImpl;
import groovy.lang.MetaMethod;
import java.lang.invoke.*;
import java.lang.invoke.MethodHandles.Lookup;
import java.lang.reflect.Array;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigInteger;
import org.codehaus.groovy.GroovyBugError;
import org.codehaus.groovy.reflection.CachedMethod;
import org.codehaus.groovy.runtime.NullObject;
import org.codehaus.groovy.runtime.ScriptBytecodeAdapter;
import org.codehaus.groovy.runtime.metaclass.DefaultMetaClassInfo;
import org.codehaus.groovy.runtime.metaclass.NewInstanceMetaMethod;
import org.codehaus.groovy.runtime.metaclass.ReflectionMetaMethod;
import org.codehaus.groovy.runtime.metaclass.DefaultMetaClassInfo.ConstantMetaClassVersioning;
import org.codehaus.groovy.runtime.wrappers.Wrapper;
/**
* Bytecode level interface for bootstrap methods used by invokedynamic.
*
* @author <a href="mailto:blackdrag@gmx.org">Jochen "blackdrag" Theodorou</a>
*/
public class IndyInterface {
/*
* notes:
* MethodHandles#dropArguments:
* invocation with (a,b,c), drop first 2 results in invocation
* with (a) only.
* MethodHandles#insertArguments:
* invocation with (a,b,c), insert (x,y) results in error.
* first need to add with addParameters (X,Y), then bind them with
* insert
*/
private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup();
private static final MethodHandle SELECT_METHOD;
static {
MethodType mt = MethodType.methodType(Object.class, MutableCallSite.class, Class.class, String.class, Boolean.class, Object.class, Object[].class);
try {
SELECT_METHOD = LOOKUP.findStatic(IndyInterface.class, "selectMethod", mt);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
private static final MethodType GENERAL_INVOKER_SIGNATURE = MethodType.methodType(Object.class, Object.class, Object[].class);
private static final MethodType INVOKE_METHOD_SIGNATURE = MethodType.methodType(Object.class, Class.class, Object.class, String.class, Object[].class, boolean.class, boolean.class);
private static final MethodType O2O = MethodType.methodType(Object.class, Object.class);
private static final MethodHandle
UNWRAP_METHOD, TO_STRING, TO_BYTE,
TO_BIGINT, SAME_MC, IS_NULL,
IS_NOT_NULL, UNWRAP_EXCEPTION, SAME_CLASS,
META_METHOD_INVOKER;
static {
try {
UNWRAP_METHOD = LOOKUP.findStatic(IndyInterface.class, "unwrap", O2O);
TO_STRING = LOOKUP.findStatic(IndyInterface.class, "coerceToString", MethodType.methodType(String.class, Object.class));
TO_BYTE = LOOKUP.findStatic(IndyInterface.class, "coerceToByte", O2O);
TO_BIGINT = LOOKUP.findStatic(IndyInterface.class, "coerceToBigInt", O2O);
SAME_MC = LOOKUP.findStatic(IndyInterface.class, "isSameMetaClass", MethodType.methodType(boolean.class, MetaClassImpl.class, Object.class));
IS_NULL = LOOKUP.findStatic(IndyInterface.class, "isNull", MethodType.methodType(boolean.class, Object.class));
IS_NOT_NULL = LOOKUP.findStatic(IndyInterface.class, "isNotNull", MethodType.methodType(boolean.class, Object.class));
UNWRAP_EXCEPTION = LOOKUP.findStatic(IndyInterface.class, "unwrap", MethodType.methodType(Object.class, GroovyRuntimeException.class));
SAME_CLASS = LOOKUP.findStatic(IndyInterface.class, "sameClass", MethodType.methodType(boolean.class, Class.class, Object.class));
META_METHOD_INVOKER = LOOKUP.findVirtual(MetaMethod.class, "invoke", GENERAL_INVOKER_SIGNATURE);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
private static final MethodHandle NULL_REF = MethodHandles.constant(Object.class, null);
private static final MethodHandle NULLOBJECT_REF = MethodHandles.constant(NullObject.class, NullObject.getNullObject());
private static final MethodHandle VALID_MC_VERSION;
static {
try {
VALID_MC_VERSION = LOOKUP.findVirtual(ConstantMetaClassVersioning.class, "isValid", MethodType.methodType(boolean.class));
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
public static CallSite bootstrap(Lookup caller, String name, MethodType type) {
return realBootstrap(caller, name, type, false);
}
public static CallSite bootstrapSafe(Lookup caller, String name, MethodType type) {
return realBootstrap(caller, name, type, true);
}
private static CallSite realBootstrap(Lookup caller, String name, MethodType type, boolean safe) {
// since indy does not give us the runtime types
// we produce first a dummy call site, which then changes the target to one,
// that does the method selection including the the direct call to the
// real method.
MutableCallSite mc = new MutableCallSite(type);
MethodHandle mh = makeFallBack(mc,caller.lookupClass(),name,type,safe);
mc.setTarget(mh);
return mc;
}
private static MethodHandle makeFallBack(MutableCallSite mc, Class<?> sender, String name, MethodType type, boolean safeNavigation) {
MethodHandle mh = SELECT_METHOD.
bindTo(mc).
bindTo(sender).
bindTo(name).
bindTo(safeNavigation).
asCollector(Object[].class, type.parameterCount()-1).
asType(type);
return mh;
}
private static Class getClass(Object x) {
if (x instanceof Class) return (Class) x;
return x.getClass();
}
private static MetaClass getMetaClass(Object receiver) {
if (receiver == null) {
return NullObject.getNullObject().getMetaClass();
} else if (receiver instanceof GroovyObject) {
return ((GroovyObject) receiver).getMetaClass();
} else {
return GroovySystem.getMetaClassRegistry().getMetaClass(getClass(receiver));
}
}
private static class CallInfo {
public Object[] args;
public MetaMethod method;
public MethodType targetType;
public String methodName;
public MethodHandle handle;
public boolean useMetaClass = false;
public MutableCallSite callSite;
public Class sender;
public boolean isVargs;
public boolean safeNavigation;
}
private static boolean isStatic(Method m) {
int mods = m.getModifiers();
return (mods & Modifier.STATIC) != 0;
}
private static void setHandleForMetaMethod(CallInfo info) {
MetaMethod metaMethod = info.method;
boolean isCategoryTypeMethod = metaMethod instanceof NewInstanceMetaMethod;
if (metaMethod instanceof ReflectionMetaMethod) {
ReflectionMetaMethod rmm = (ReflectionMetaMethod) metaMethod;
metaMethod = rmm.getCachedMethod();
}
if (metaMethod instanceof CachedMethod) {
CachedMethod cm = (CachedMethod) metaMethod;
info.isVargs = cm.isVargsMethod();
try {
Method m = cm.getCachedMethod();
info.handle = LOOKUP.unreflect(m);
if (!isCategoryTypeMethod && isStatic(m)) {
info.handle = MethodHandles.dropArguments(info.handle, 0, Class.class);
}
} catch (IllegalAccessException e) {
throw new GroovyBugError(e);
}
} else if (info.method != null) {
info.handle = META_METHOD_INVOKER;
info.handle = info.handle.bindTo(info.method);
if (info.method.getNativeParameterTypes().length==1 &&
info.args.length==1)
{
// the method expects a parameter but we don't provide an
// argument for that. So we give in a Object[], containing
// a null value
// since MethodHandles.insertArguments is a vargs method giving
// only the array would be like just giving a null value, so
// we need to wrap the array that represents our argument in
// another one for the vargs call
info.handle = MethodHandles.insertArguments(info.handle, 1, new Object[]{new Object[]{null}});
} else {
info.handle = info.handle.asCollector(Object[].class, info.targetType.parameterCount()-2);
}
}
}
private static void chooseMethod(MetaClass mc, CallInfo ci) {
if (!(mc instanceof MetaClassImpl)) {return;}
MetaClassImpl mci = (MetaClassImpl) mc;
Object receiver = ci.args[0];
if (receiver==null) {
receiver = NullObject.getNullObject();
}
if (receiver instanceof Class) {
ci.method = mci.retrieveStaticMethod(ci.methodName, removeRealReceiver(ci.args));
} else {
ci.method = mci.getMethodWithCaching(getClass(receiver), ci.methodName, removeRealReceiver(ci.args), false);
}
}
private static void setMetaClassCallHandleIfNedded(MetaClass mc, CallInfo ci) {
if (ci.handle!=null) return;
try {
ci.useMetaClass = true;
Object receiver = ci.args[0];
if (receiver instanceof Class) {
ci.handle = LOOKUP.findVirtual(mc.getClass(), "invokeStaticMethod", MethodType.methodType(Object.class, Object.class, String.class, Object[].class));
ci.handle = ci.handle.bindTo(mc);
} else {
ci.handle = LOOKUP.findVirtual(mc.getClass(), "invokeMethod", INVOKE_METHOD_SIGNATURE);
ci.handle = ci.handle.bindTo(mc).bindTo(ci.sender);
ci.handle = MethodHandles.insertArguments(ci.handle, ci.handle.type().parameterCount()-2, true, false);
}
ci.handle = MethodHandles.insertArguments(ci.handle, 1, ci.methodName);
ci.handle = ci.handle.asCollector(Object[].class, ci.targetType.parameterCount()-2);
} catch (Exception e) {
throw new GroovyBugError(e);
}
}
/**
* called by handle
*/
public static Object unwrap(GroovyRuntimeException gre) throws Throwable {
throw ScriptBytecodeAdapter.unwrap(gre);
}
/**
* called by handle
*/
public static boolean isSameMetaClass(MetaClassImpl mc, Object receiver) {
//TODO: remove this method if possible by switchpoint usage
return receiver instanceof GroovyObject && mc==((GroovyObject)receiver).getMetaClass();
}
/**
* called by handle
*/
public static Object unwrap(Object o) {
Wrapper w = (Wrapper) o;
return w.unwrap();
}
/**
* called by handle
*/
public static String coerceToString(Object o) {
return o.toString();
}
/**
* called by handle
*/
public static Object coerceToByte(Object o) {
return new Byte(((Number) o).byteValue());
}
/**
* called by handle
*/
public static Object coerceToBigInt(Object o) {
return new BigInteger(String.valueOf((Number) o));
}
/**
* check for null - called by handle
*/
public static boolean isNull(Object o) {
return o == null;
}
/**
* check for != null - called by handle
*/
public static boolean isNotNull(Object o) {
return o != null;
}
/**
* called by handle
*/
public static boolean sameClass(Class c, Object o) {
if (o==null) return false;
return o.getClass() == c;
}
private static void correctWrapping(CallInfo ci) {
if (ci.useMetaClass) return;
Class[] pt = ci.handle.type().parameterArray();
for (int i=1; i<ci.args.length; i++) {
if (ci.args[i] instanceof Wrapper) {
Class type = pt[i];
MethodType mt = MethodType.methodType(type, Object.class);
ci.handle = MethodHandles.filterArguments(ci.handle, i, UNWRAP_METHOD.asType(mt));
}
}
}
private static void correctCoerce(CallInfo ci) {
if (ci.useMetaClass) return;
Class[] parameters = ci.handle.type().parameterArray();
if (ci.args.length != parameters.length) {
throw new GroovyBugError("at this point argument array length and parameter array length should be the same");
}
for (int i=1; i<ci.args.length; i++) {
Object arg = ci.args[i];
if (arg==null) continue;
Class got = arg.getClass();
if (arg instanceof GString && parameters[i] == String.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_STRING);
} else if (parameters[i] == Byte.class && got != Byte.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_BYTE);
} else if (parameters[i] == BigInteger.class && got != BigInteger.class) {
ci.handle = MethodHandles.filterArguments(ci.handle, i, TO_BIGINT);
}
}
}
private static void correctNullReceiver(CallInfo ci){
if (ci.args[0]!=null || ci.useMetaClass) return;
MethodHandle nullReceiverDroppingHandle = MethodHandles.dropArguments(NULLOBJECT_REF, 0, ci.handle.type().parameterType(0));
ci.handle = MethodHandles.filterArguments(ci.handle, 0, nullReceiverDroppingHandle);
}
private static void dropDummyReceiver(CallInfo ci) {
ci.handle = MethodHandles.dropArguments(ci.handle, 0, Integer.class);
}
private static void setGuards(CallInfo ci, Object receiver) {
if (ci.handle==null) return;
MethodHandle fallback = makeFallBack(ci.callSite, ci.sender, ci.methodName, ci.targetType, ci.safeNavigation);
// special guards for receiver
MethodHandle test;
if (receiver instanceof GroovyObject) {
GroovyObject go = (GroovyObject) receiver;
MetaClassImpl mc = (MetaClassImpl) go.getMetaClass();
test = SAME_MC.bindTo(mc);
// drop dummy receiver
test = test.asType(MethodType.methodType(boolean.class,ci.targetType.parameterType(1)));
test = MethodHandles.dropArguments(test, 0, ci.targetType.parameterType(0));
} else {
// handle constant meta class
ConstantMetaClassVersioning mcv = DefaultMetaClassInfo.getCurrentConstantMetaClassVersioning();
test = VALID_MC_VERSION.bindTo(mcv);
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
// check for not being null
test = IS_NOT_NULL.asType(MethodType.methodType(boolean.class,ci.targetType.parameterType(1)));
test = MethodHandles.dropArguments(test, 0, ci.targetType.parameterType(0));
}
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
// guards for receiver and parameter
Class[] pt = ci.handle.type().parameterArray();
for (int i=0; i<ci.args.length; i++) {
Object arg = ci.args[i];
if (arg==null) {
test = IS_NULL.asType(MethodType.methodType(boolean.class, pt[i+1]));
} else {
Class argClass = arg.getClass();
test = SAME_CLASS.
bindTo(argClass).
asType(MethodType.methodType(boolean.class, pt[i+1]));
}
Class[] drops = new Class[i+1];
for (int j=0; j<drops.length; j++) drops[j] = pt[j];
test = MethodHandles.dropArguments(test, 0, drops);
ci.handle = MethodHandles.guardWithTest(test, ci.handle, fallback);
}
}
private static void correctParameterLenth(CallInfo info) {
Class[] params = info.handle.type().parameterArray();
if (info.handle==null) return;
if (!info.isVargs) {
if (params.length != info.args.length) {
//TODO: add null argument
}
return;
}
Class lastParam = params[params.length-1];
Object lastArg = info.args[info.args.length-1];
if (params.length == info.args.length) {
// may need rewrap
if (lastParam == lastArg || lastArg == null) return;
if (lastParam.isInstance(lastArg)) return;
// arg is not null and not assignment compatible
// so we really need to rewrap
info.handle = info.handle.asCollector(lastParam, 1);
} else if (params.length > info.args.length) {
// we depend on the method selection having done a good
// job before already, so the only case for this here is, that
// we have no argument for the array, meaning params.length is
// args.length+1. In that case we have to fill in an empty array
info.handle = MethodHandles.insertArguments(info.handle, params.length-1, Array.newInstance(lastParam.getComponentType(), 0));
} else { //params.length < args.length
// we depend on the method selection having done a good
// job before already, so the only case for this here is, that
// all trailing arguments belong into the vargs array
info.handle = info.handle.asCollector(
lastParam,
info.args.length - params.length + 1);
}
}
private static void addExceptionHandler(CallInfo info) {
if (info.handle==null) return;
MethodType returnType = MethodType.methodType(info.handle.type().returnType(), GroovyRuntimeException.class);
info.handle = MethodHandles.catchException(info.handle, GroovyRuntimeException.class, UNWRAP_EXCEPTION.asType(returnType));
}
private static boolean setNullForSafeNavigation(CallInfo info) {
if (!info.safeNavigation) return false;
info.handle = MethodHandles.dropArguments(NULL_REF,0,info.targetType.parameterArray());
return true;
}
    /**
     * Core of the invokedynamic fallback: selects a method for the current
     * receiver, adapts the resulting method handle step by step (wrapping,
     * parameter length, coercion, null receiver, dummy-receiver removal),
     * installs it as the new call site target together with guards, and then
     * performs the first invocation with the current arguments.
     *
     * @param callSite       the mutable call site to (re)target
     * @param sender         the class the call originates from
     * @param methodName     the name of the method to select
     * @param safeNavigation whether the call used the ?. operator
     * @param dummyReceiver  placeholder receiver prepended to the real arguments
     * @param arguments      the call arguments; arguments[0] is the real receiver
     * @return the result of invoking the selected handle
     * @throws Throwable anything the selected target throws
     */
    public static Object selectMethod(MutableCallSite callSite, Class sender, String methodName, Boolean safeNavigation, Object dummyReceiver, Object[] arguments) throws Throwable {
        //TODO: handle GroovyInterceptable
        CallInfo callInfo = new CallInfo();
        callInfo.targetType = callSite.type();
        callInfo.methodName = methodName;
        callInfo.args = arguments;
        callInfo.callSite = callSite;
        callInfo.sender = sender;
        // safe navigation only takes effect when the receiver is actually null
        callInfo.safeNavigation = safeNavigation && arguments[0]==null;
        if (!setNullForSafeNavigation(callInfo)) {
//            setInterceptableHandle(callInfo);
            // select the meta method via the receiver's meta class, then adapt
            // the handle in a fixed sequence of corrections
            MetaClass mc = getMetaClass(callInfo.args[0]);
            chooseMethod(mc, callInfo);
            setHandleForMetaMethod(callInfo);
            setMetaClassCallHandleIfNedded(mc, callInfo);
            correctWrapping(callInfo);
            correctParameterLenth(callInfo);
            correctCoerce(callInfo);
            correctNullReceiver(callInfo);
            dropDummyReceiver(callInfo);
            try {
                callInfo.handle = callInfo.handle.asType(callInfo.targetType);
            } catch (Exception e) {
                // asType fails when the adapted handle still does not fit the
                // call site type; log which method selection went wrong
                System.err.println("ERROR while processing "+methodName);
                throw e;
            }
            addExceptionHandler(callInfo);
        }
        // install guards and the new target, then do the first invocation
        setGuards(callInfo, callInfo.args[0]);
        callSite.setTarget(callInfo.handle);
        return callInfo.handle.invokeWithArguments(repack(dummyReceiver,callInfo.args));
    }
private static Object[] repack(Object o, Object[] args) {
Object[] ar = new Object[args.length+1];
ar[0] = o;
for (int i=0; i<args.length; i++) {
ar[i+1] = args[i];
}
return ar;
}
private static Object[] removeRealReceiver(Object[] args) {
Object[] ar = new Object[args.length-1];
for (int i=1; i<args.length; i++) {
ar[i-1] = args[i];
}
return ar;
}
}
|
exchange buggy null receiver replacement code and add additional null receiver check to exclude isNotNull test to avoid the guards invalidating the call site all the time
|
src/main/org/codehaus/groovy/vmplugin/v7/IndyInterface.java
|
exchange buggy null receiver replacement code and add additional null receiver check to exclude isNotNull test to avoid the guards invalidating the call site all the time
|
|
Java
|
apache-2.0
|
813e9164e3bf0e8b0807563968a0cba348ab6984
| 0
|
kakotor/charity_sale,kakotor/charity_sale,kakotor/charity_sale,kakotor/charity_sale,kakotor/charity_sale,kakotor/charity_sale,kakotor/charity_sale
|
package com.quyeying.charity.goods.service;
import com.quyeying.charity.domain.Goods;
import com.quyeying.charity.goods.dto.SaleMoneyDto;
import com.quyeying.charity.goods.dto.ValueDto;
import com.quyeying.charity.report.dto.ReportDto;
import com.quyeying.framework.db.BaseRepository;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
/**
* User: bysun
* Date: 2014/7/29
* Time: 17:20
*/
@SuppressWarnings("UnusedDeclaration")
public class GoodsRepositoryImpl extends BaseRepository implements GoodsRepositoryCustom {
    /** Sale groups that must always appear in the report, even with no data. */
    private static final String[] SALE_GROUPS = {"A", "B", "C", "D", "E"};
    /**
     * Pages goods, optionally filtered by the (upper-cased) search value
     * matched as a prefix of goodsNum.
     *
     * @param dto paging and filter parameters
     * @return one page of matching goods
     */
    public Page<Goods> findByDto(ReportDto dto) {
        Criteria criteria = new Criteria();
        if (null != dto.getSearch() && StringUtils.isNotBlank(dto.getSearch().getValue())) {
            // goodsNum starts with the goods type, so anchor the regex at the start
            criteria.andOperator(Criteria.where("goodsNum").regex("^" + dto.getGoodsType() + dto.getSearch().getValue().toUpperCase() + ".*"));
        }
        return super.baseQuery(Goods.class, criteria, dto);
    }
    /**
     * Aggregates sale money per group via map-reduce over the "goods"
     * collection, optionally restricted to the given goods types.
     * Returns zeroed totals for all groups when the collection is empty,
     * since map-reduce needs at least one document.
     *
     * @param goodsType optional goods types to include; empty means all
     * @return per-group totals plus the overall total
     */
    @Override
    public SaleMoneyDto findSaleMoney(String... goodsType) {
        SaleMoneyDto dto = new SaleMoneyDto();
        if (0 == mongo.count(new Query(), Goods.class)) {
            dto.setTotalSaleMoney(0);
            // one loop instead of five duplicated put(...) calls
            for (String group : SALE_GROUPS) {
                dto.getGroupSaleMoney().put(group, 0);
            }
            return dto;
        }
        Query query = new Query();
        if (goodsType.length > 0) {
            query.addCriteria(Criteria.where("goodsType").in(goodsType));
        }
        MapReduceResults<ValueDto> result = mongo.mapReduce(
                query,
                "goods",
                "classpath:mongo/salemoney/map.js",
                "classpath:mongo/salemoney/reduce.js",
                ValueDto.class
        );
        int total = 0;
        for (ValueDto valueDto : result) {
            dto.getGroupSaleMoney().put(valueDto.getId(), valueDto.getValue());
            total += valueDto.getValue();
        }
        dto.setTotalSaleMoney(total);
        return dto;
    }
    /** Injects the MongoTemplate used by the base repository. */
    @Override
    @Autowired
    public void setMongo(MongoTemplate mongo) {
        super.mongo = mongo;
    }
}
|
src/main/java/com/quyeying/charity/goods/service/GoodsRepositoryImpl.java
|
package com.quyeying.charity.goods.service;
import com.quyeying.charity.domain.Goods;
import com.quyeying.charity.goods.dto.SaleMoneyDto;
import com.quyeying.charity.goods.dto.ValueDto;
import com.quyeying.charity.report.dto.ReportDto;
import com.quyeying.framework.db.BaseRepository;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
/**
* User: bysun
* Date: 2014/7/29
* Time: 17:20
*/
@SuppressWarnings("UnusedDeclaration")
public class GoodsRepositoryImpl extends BaseRepository implements GoodsRepositoryCustom {
    /**
     * Pages goods, optionally filtered by goods type and by the (upper-cased)
     * search value matched as a prefix of goodsNum.
     *
     * NOTE(review): andOperator is invoked twice on the same Criteria when both
     * filters apply; Spring Data MongoDB permits only one andOperator per
     * Criteria and throws otherwise — presumably the cause of the group-query
     * exception; verify against the callers.
     *
     * @param dto paging and filter parameters
     * @return one page of matching goods
     */
    public Page<Goods> findByDto(ReportDto dto) {
        Criteria criteria = new Criteria();
        if (StringUtils.isNotBlank(dto.getGoodsType())) {
            criteria.andOperator(Criteria.where("goodsType").is(dto.getGoodsType()));
        }
        if (null != dto.getSearch() && StringUtils.isNotBlank(dto.getSearch().getValue())) {
            // goodsNum starts with the goods type, so the regex is anchored
            criteria.andOperator(Criteria.where("goodsNum").regex("^" + dto.getGoodsType() + dto.getSearch().getValue().toUpperCase() + ".*"));
        }
        return super.baseQuery(Goods.class, criteria, dto);
    }
    /**
     * Aggregates sale money per group via map-reduce over the "goods"
     * collection, optionally restricted to the given goods types.
     * When the collection is empty, returns zeroed totals for groups A-E.
     *
     * @param goodsType optional goods types to include; empty means all
     * @return per-group totals plus the overall total
     */
    @Override
    public SaleMoneyDto findSaleMoney(String... goodsType) {
        SaleMoneyDto dto = new SaleMoneyDto();
        // map-reduce needs documents to work on; short-circuit when empty
        if(0== mongo.count(new Query(),Goods.class)){
            dto.setTotalSaleMoney(0);
            dto.getGroupSaleMoney().put("A", 0);
            dto.getGroupSaleMoney().put("B", 0);
            dto.getGroupSaleMoney().put("C", 0);
            dto.getGroupSaleMoney().put("D", 0);
            dto.getGroupSaleMoney().put("E", 0);
            return dto;
        }
        Query query = new Query();
        if (goodsType.length > 0) query.addCriteria(Criteria.where("goodsType").in(goodsType));
        MapReduceResults<ValueDto> result = mongo.mapReduce(
                query,
                "goods",
                "classpath:mongo/salemoney/map.js",
                "classpath:mongo/salemoney/reduce.js",
                ValueDto.class
        );
        int total = 0;
        for (ValueDto valueDto : result) {
            dto.getGroupSaleMoney().put(valueDto.getId(), valueDto.getValue());
            total += valueDto.getValue();
        }
        dto.setTotalSaleMoney(total);
        return dto;
    }
    /** Injects the MongoTemplate used by the base repository. */
    @Override
    @Autowired
    public void setMongo(MongoTemplate mongo) {
        super.mongo = mongo;
    }
}
|
1.修复小组查询时异常
|
src/main/java/com/quyeying/charity/goods/service/GoodsRepositoryImpl.java
|
1.修复小组查询时异常
|
|
Java
|
apache-2.0
|
02e767a8163de26178b1fda36ca429f7c0bf3af0
| 0
|
xiaojinzi123/Component,xiaojinzi123/Component,xiaojinzi123/Component
|
package com.xiaojinzi.component;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.TypeName;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
/**
* 基础的注解驱动器,帮助获取一些常用的信息
* time : 2018/12/26
*
* @author : xiaojinzi
*/
public abstract class BaseProcessor extends AbstractProcessor {
    /** Separator line used in processor log output. */
    public static final String NORMALLINE = "---------------------------";
    /* Error thrown when no host is configured in build.gradle, or it is configured wrongly. */
    public static final RuntimeException NULLHOSTEXCEPTION = new RuntimeException("the host must not be null,you must define host in build.gradle file,such as:\n\n" +
            "defaultConfig {\n" +
            "    minSdkVersion 14\n" +
            "    targetSdkVersion 27\n" +
            "    versionCode 1\n" +
            "    versionName \"1.0\"\n\n" +
            "    javaCompileOptions {\n" +
            "        annotationProcessorOptions {\n" +
            "            arguments = [HOST: \"user\"]\n" +
            "        }\n" +
            "    }\n" +
            "}\n \n");
    /* Error thrown when the implementation library is not a dependency of the module. */
    public static final RuntimeException IMPL_NOT_BE_DEPENDENT_ON = new RuntimeException(
            "Did your module depend on component-impl or component-impl-rx? \n" +
                    "if your project is androidx, please config \n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl:<version>-androidx' \n" +
                    "or\n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl-rx:<version>-androidx' \n" +
                    "Otherwise, config \n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl:<version>' \n" +
                    "or\n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl-rx:<version>' \n" +
                    "<version> is the last version of Component, don't forget to replace" +
                    "see https://github.com/xiaojinzi123/Component/releases"
    );
    // Output folder for generated router documentation; null when not configured.
    protected String routerDocFolder = null;
    // Whether router documentation generation was enabled via processor options.
    protected boolean routerDocEnable;
    protected Filer mFiler;
    protected Messager mMessager;
    protected Types mTypes;
    protected Elements mElements;
    // Frequently used type elements, resolved once in init().
    protected TypeElement mTypeElementComponentGeneratedAnno;
    protected TypeElement mTypeElementString;
    protected TypeElement mTypeElementInteger;
    protected TypeElement mTypeElementList;
    protected TypeElement mTypeElementArrayList;
    protected TypeElement mTypeElementSparseArray;
    protected TypeElement mTypeElementHashMap;
    protected TypeElement mTypeElementHashSet;
    // ClassName counterparts used when generating code with javapoet.
    protected ClassName mClassNameComponentGeneratedAnno;
    protected ClassName mClassNameString;
    protected ClassName mClassNameList;
    protected ClassName mClassNameArrayList;
    protected ClassName mClassNameSparseArray;
    protected ClassName mClassNameHashMap;
    protected ClassName mClassNameHashSet;
    protected ClassName mClassNameKeep;
    protected ClassName mClassNameNonNull;
    protected TypeName mTypeNameString;
    /**
     * Caches processing utilities and resolves the frequently used framework,
     * JDK and Android annotation types once per processing environment.
     * Throws IMPL_NOT_BE_DEPENDENT_ON when the implementation library is
     * missing, and a ProcessException with a dependency tip when the @Keep /
     * @NonNull annotations cannot be found.
     */
    @Override
    public synchronized void init(ProcessingEnvironment processingEnvironment) {
        super.init(processingEnvironment);
        // Router-doc settings arrive as -A annotation-processor arguments.
        Map<String, String> options = processingEnv.getOptions();
        if (options != null) {
            routerDocFolder = options.get("RouterDocFolder");
            routerDocEnable = Boolean.parseBoolean(options.get("RouterDocEnable"));
        }
        mFiler = processingEnv.getFiler();
        mMessager = processingEnv.getMessager();
        mTypes = processingEnv.getTypeUtils();
        mElements = processingEnv.getElementUtils();
        try {
            mTypeElementComponentGeneratedAnno = mElements.getTypeElement(ComponentConstants.COMPONENT_GENERATED_ANNO_CLASS_NAME);
            mClassNameComponentGeneratedAnno = ClassName.get(mTypeElementComponentGeneratedAnno);
        } catch (Exception e) {
            throw IMPL_NOT_BE_DEPENDENT_ON;
        }
        mTypeElementString = mElements.getTypeElement(ComponentConstants.JAVA_STRING);
        mTypeElementInteger = mElements.getTypeElement(ComponentConstants.JAVA_INTEGER);
        mTypeElementList = mElements.getTypeElement(ComponentConstants.JAVA_LIST);
        mTypeElementArrayList = mElements.getTypeElement(ComponentConstants.JAVA_ARRAYLIST);
        mTypeElementSparseArray = mElements.getTypeElement(ComponentConstants.ANDROID_SPARSEARRAY);
        mTypeElementHashMap = mElements.getTypeElement(ComponentConstants.JAVA_HASHMAP);
        mTypeElementHashSet = mElements.getTypeElement(ComponentConstants.JAVA_HASHSET);
        mClassNameString = ClassName.get(mTypeElementString);
        mClassNameList = ClassName.get(mTypeElementList);
        mClassNameArrayList = ClassName.get(mTypeElementArrayList);
        mClassNameSparseArray = ClassName.get(mTypeElementSparseArray);
        mClassNameHashMap = ClassName.get(mTypeElementHashMap);
        mClassNameHashSet = ClassName.get(mTypeElementHashSet);
        mTypeNameString = TypeName.get(mTypeElementString.asType());
        // The @Keep and @NonNull annotations (androidx or the support library).
        TypeElement keepTypeElement = mElements.getTypeElement(ComponentConstants.ANDROID_ANNOTATION_KEEP);
        TypeElement nonNullTypeElement = mElements.getTypeElement(ComponentConstants.ANDROID_ANNOTATION_NONNULL);
        // Elements.getTypeElement returns null when the class is absent from the
        // classpath; check BEFORE calling ClassName.get, which would otherwise
        // throw a bare NullPointerException and hide the dependency tip below.
        if (keepTypeElement == null || nonNullTypeElement == null) {
            String addDependencyTip = getAddDependencyTip(Arrays.asList(
                    ComponentConstants.ANDROID_ANNOTATION_KEEP,
                    ComponentConstants.ANDROID_ANNOTATION_NONNULL
            ), true);
            throw new ProcessException(addDependencyTip + " \nif you add dependency already, then your configuration is wrong. " +
                    "If you use androidx, see https://github.com/xiaojinzi123/Component/wiki/%E4%BE%9D%E8%B5%96%E5%92%8C%E9%85%8D%E7%BD%AE-AndroidX " +
                    "\n else see https://github.com/xiaojinzi123/Component/wiki/%E4%BE%9D%E8%B5%96%E5%92%8C%E9%85%8D%E7%BD%AE");
        }
        mClassNameKeep = ClassName.get(keepTypeElement);
        mClassNameNonNull = ClassName.get(nonNullTypeElement);
    }
    /**
     * Builds a human-readable "add this dependency" tip listing the missing
     * class paths.
     *
     * @param classPathList fully qualified class names that could not be found
     * @param isOr          true to join alternatives with "or", false for "and"
     * @return the formatted tip message
     */
    protected String getAddDependencyTip(List<String> classPathList, boolean isOr) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < classPathList.size(); i++) {
            String classPath = classPathList.get(i);
            if (i == 0) {
                sb.append("'")
                        .append(classPath)
                        .append("'");
            } else {
                sb.append(" ").append(isOr ? "or" : "and").append(" ").append("'").append(classPath).append("'");
            }
        }
        sb.append(" ").append("can't be found, did you add dependency to build.gradle?");
        return sb.toString();
    }
    /** @return true when router-doc generation is enabled AND a target folder was configured. */
    protected boolean isRouterDocEnable() {
        return routerDocEnable && (routerDocFolder != null && !routerDocFolder.isEmpty());
    }
}
|
ComponentCompiler/src/main/java/com/xiaojinzi/component/BaseProcessor.java
|
package com.xiaojinzi.component;
import com.squareup.javapoet.ClassName;
import com.squareup.javapoet.TypeName;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.element.TypeElement;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
/**
* 基础的注解驱动器,帮助获取一些常用的信息
* time : 2018/12/26
*
* @author : xiaojinzi
*/
public abstract class BaseProcessor extends AbstractProcessor {
    // Separator line used in processor log output.
    public static final String NORMALLINE = "---------------------------";
    /* Error thrown when no host is configured in build.gradle, or it is configured wrongly. */
    public static final RuntimeException NULLHOSTEXCEPTION = new RuntimeException("the host must not be null,you must define host in build.gradle file,such as:\n\n" +
            "defaultConfig {\n" +
            "    minSdkVersion 14\n" +
            "    targetSdkVersion 27\n" +
            "    versionCode 1\n" +
            "    versionName \"1.0\"\n\n" +
            "    javaCompileOptions {\n" +
            "        annotationProcessorOptions {\n" +
            "            arguments = [HOST: \"user\"]\n" +
            "        }\n" +
            "    }\n" +
            "}\n \n");
    /* Error thrown when the implementation library is not a dependency of the module. */
    public static final RuntimeException IMPL_NOT_BE_DEPENDENT_ON = new RuntimeException(
            "Did your module depend on component-impl or component-impl-rx? \n" +
                    "if your project is androidx, please config \n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl:<version>-androidx' \n" +
                    "or\n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl-rx:<version>-androidx' \n" +
                    "Otherwise, config \n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl:<version>' \n" +
                    "or\n" +
                    "api 'com.github.xiaojinzi123.Component:component-impl-rx:<version>' \n" +
                    "<version> is the last version of Component, don't forget to replace" +
                    "see https://github.com/xiaojinzi123/Component/releases"
    );
    // Output folder for generated router documentation; null when not configured.
    protected String routerDocFolder = null;
    // Whether router documentation generation was enabled via processor options.
    protected boolean routerDocEnable;
    protected Filer mFiler;
    protected Messager mMessager;
    protected Types mTypes;
    protected Elements mElements;
    // Frequently used type elements, resolved once in init().
    protected TypeElement mTypeElementComponentGeneratedAnno;
    protected TypeElement mTypeElementString;
    protected TypeElement mTypeElementInteger;
    protected TypeElement mTypeElementList;
    protected TypeElement mTypeElementArrayList;
    protected TypeElement mTypeElementSparseArray;
    protected TypeElement mTypeElementHashMap;
    protected TypeElement mTypeElementHashSet;
    // ClassName counterparts used when generating code with javapoet.
    protected ClassName mClassNameComponentGeneratedAnno;
    protected ClassName mClassNameString;
    protected ClassName mClassNameList;
    protected ClassName mClassNameArrayList;
    protected ClassName mClassNameSparseArray;
    protected ClassName mClassNameHashMap;
    protected ClassName mClassNameHashSet;
    protected ClassName mClassNameKeep;
    protected ClassName mClassNameNonNull;
    protected TypeName mTypeNameString;
    /**
     * Caches processing utilities and resolves the frequently used framework,
     * JDK and Android annotation types once per processing environment.
     */
    @Override
    public synchronized void init(ProcessingEnvironment processingEnvironment) {
        super.init(processingEnvironment);
        // Router-doc settings arrive as -A annotation-processor arguments.
        Map<String, String> options = processingEnv.getOptions();
        if (options != null) {
            routerDocFolder = options.get("RouterDocFolder");
            routerDocEnable = Boolean.parseBoolean(options.get("RouterDocEnable"));
        }
        mFiler = processingEnv.getFiler();
        mMessager = processingEnv.getMessager();
        mTypes = processingEnv.getTypeUtils();
        mElements = processingEnv.getElementUtils();
        try {
            mTypeElementComponentGeneratedAnno = mElements.getTypeElement(ComponentConstants.COMPONENT_GENERATED_ANNO_CLASS_NAME);
            mClassNameComponentGeneratedAnno = ClassName.get(mTypeElementComponentGeneratedAnno);
        } catch (Exception e) {
            throw IMPL_NOT_BE_DEPENDENT_ON;
        }
        mTypeElementString = mElements.getTypeElement(ComponentConstants.JAVA_STRING);
        mTypeElementInteger = mElements.getTypeElement(ComponentConstants.JAVA_INTEGER);
        mTypeElementList = mElements.getTypeElement(ComponentConstants.JAVA_LIST);
        mTypeElementArrayList = mElements.getTypeElement(ComponentConstants.JAVA_ARRAYLIST);
        mTypeElementSparseArray = mElements.getTypeElement(ComponentConstants.ANDROID_SPARSEARRAY);
        mTypeElementHashMap = mElements.getTypeElement(ComponentConstants.JAVA_HASHMAP);
        mTypeElementHashSet = mElements.getTypeElement(ComponentConstants.JAVA_HASHSET);
        mClassNameString = ClassName.get(mTypeElementString);
        mClassNameList = ClassName.get(mTypeElementList);
        mClassNameArrayList = ClassName.get(mTypeElementArrayList);
        mClassNameSparseArray = ClassName.get(mTypeElementSparseArray);
        mClassNameHashMap = ClassName.get(mTypeElementHashMap);
        mClassNameHashSet = ClassName.get(mTypeElementHashSet);
        mTypeNameString = TypeName.get(mTypeElementString.asType());
        // The @Keep and @NonNull annotations (androidx or the support library).
        // NOTE(review): if getTypeElement returns null here, ClassName.get will
        // throw a NullPointerException before the null check below can produce
        // the helpful dependency tip — confirm and consider null-checking the
        // TypeElements first.
        mClassNameKeep = ClassName.get(mElements.getTypeElement(ComponentConstants.ANDROID_ANNOTATION_KEEP));
        mClassNameNonNull = ClassName.get(mElements.getTypeElement(ComponentConstants.ANDROID_ANNOTATION_NONNULL));
        if (mClassNameKeep == null || mClassNameNonNull == null) {
            String addDependencyTip = getAddDependencyTip(Arrays.asList(
                    ComponentConstants.ANDROID_ANNOTATION_KEEP,
                    ComponentConstants.ANDROID_ANNOTATION_NONNULL
            ), true);
            throw new ProcessException(addDependencyTip + " \nif you add dependency already, then your configuration is wrong. " +
                    "If you use androidx, see https://github.com/xiaojinzi123/Component/wiki/%E4%BE%9D%E8%B5%96%E5%92%8C%E9%85%8D%E7%BD%AE-AndroidX " +
                    "\n else see https://github.com/xiaojinzi123/Component/wiki/%E4%BE%9D%E8%B5%96%E5%92%8C%E9%85%8D%E7%BD%AE");
        }
    }
    /**
     * Builds a human-readable "add this dependency" tip listing the missing
     * class paths.
     *
     * @param classPathList fully qualified class names that could not be found
     * @param isOr          true to join alternatives with "or", false for "and"
     * @return the formatted tip message
     */
    protected String getAddDependencyTip(List<String> classPathList, boolean isOr) {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < classPathList.size(); i++) {
            String classPath = classPathList.get(i);
            if (i == 0) {
                sb.append("'")
                        .append(classPath)
                        .append("'");
            } else {
                sb.append(" ").append(isOr ? "or" : "and").append(" ").append("'").append(classPath).append("'");
            }
        }
        sb.append(" ").append("can't be found, did you add dependency to build.gradle?");
        return sb.toString();
    }
    /** @return true when router-doc generation is enabled AND a target folder was configured. */
    protected boolean isRouterDocEnable() {
        return routerDocEnable && (routerDocFolder != null && !routerDocFolder.isEmpty());
    }
}
|
注解驱动器中当找不到 @Keep 和 @NonNull 注解的时候的错误提示优化
|
ComponentCompiler/src/main/java/com/xiaojinzi/component/BaseProcessor.java
|
注解驱动器中当找不到 @Keep 和 @NonNull 注解的时候的错误提示优化
|
|
Java
|
apache-2.0
|
01ee83453512e1129b5c2aa25afe573bf47a3c08
| 0
|
sangramjadhav/testrs
|
2254260c-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
225396e2-2ece-11e5-905b-74de2bd44bed
|
2254260c-2ece-11e5-905b-74de2bd44bed
|
hello.java
|
2254260c-2ece-11e5-905b-74de2bd44bed
|
|
Java
|
apache-2.0
|
dbd017b25ad8f6ae64786d1eace80ce184dacc04
| 0
|
apache/openwebbeans,apache/openwebbeans,apache/openwebbeans
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.apache.webbeans.config;
import java.util.HashMap;
import java.util.Map;
import org.apache.webbeans.exception.WebBeansException;
import org.apache.webbeans.util.Asserts;
import org.apache.webbeans.util.WebBeansUtil;
/**
* Holds singletons based on the deployment
* class loader.
*
* @version $Rev$ $Date$
*
*/
public final class WebBeansFinder
{
    /**
     * Per-deployment registry.
     * Keys: deployment class loaders.
     * Values: maps from singleton class name to the singleton instance.
     */
    private static Map<ClassLoader, Map<String, Object>> singletonMap = new HashMap<ClassLoader, Map<String,Object>>();

    /**
     * Utility class; never instantiated.
     */
    private WebBeansFinder()
    {
        //No action
    }

    /**
     * Gets the singleton instance for the current thread's class loader.
     * @param singletonName singleton class name
     * @return singleton instance
     */
    public static Object getSingletonInstance(String singletonName)
    {
        return getSingletonInstance(singletonName, WebBeansUtil.getCurrentClassLoader());
    }

    /**
     * Gets the singleton instance for the given deployment, lazily creating
     * it via the class' no-arg constructor on first request.
     * @param singletonName singleton class name
     * @param classLoader class loader of the deployment
     * @return singleton instance for this deployment
     */
    public static Object getSingletonInstance(String singletonName, ClassLoader classLoader)
    {
        synchronized (singletonMap)
        {
            Map<String, Object> managerMap = singletonMap.get(classLoader);
            if (managerMap == null)
            {
                managerMap = new HashMap<String, Object>();
                singletonMap.put(classLoader, managerMap);
            }
            Object instance = managerMap.get(singletonName);
            if (instance != null)
            {
                return instance;
            }
            // No singleton for this deployment yet: load, instantiate, register.
            try
            {
                instance = classLoader.loadClass(singletonName).newInstance();
            }
            catch (InstantiationException e)
            {
                throw new WebBeansException("Unable to instantiate class : " + singletonName, e);
            }
            catch (IllegalAccessException e)
            {
                throw new WebBeansException("Illegal access exception in creating instance with class : " + singletonName, e);
            }
            catch (ClassNotFoundException e)
            {
                throw new WebBeansException("Class not found exception in creating instance with class : " + singletonName, e);
            }
            managerMap.put(singletonName, instance);
            return instance;
        }
    }

    /**
     * Clear all deployment instances when the application is undeployed.
     * @param classLoader class loader of the deployment
     */
    public static void clearInstances(ClassLoader classLoader)
    {
        Asserts.assertNotNull(classLoader, "classloader is null");
        synchronized (singletonMap)
        {
            singletonMap.remove(classLoader);
        }
    }
}
|
webbeans-impl/src/main/java/org/apache/webbeans/config/WebBeansFinder.java
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.apache.webbeans.config;
import java.util.HashMap;
import java.util.Map;
import org.apache.webbeans.exception.WebBeansException;
import org.apache.webbeans.util.Asserts;
import org.apache.webbeans.util.WebBeansUtil;
/**
* Holds singletons based on the deployment
* class loader.
*
* @version $Rev$ $Date$
*
*/
public final class WebBeansFinder
{
    /**
     * Keys --> ClassLoaders
     * Values --> Maps of singleton class name with object
     */
    private static Map<ClassLoader, Map<String, Object>> singletonMap = new HashMap<ClassLoader, Map<String,Object>>();
    /**
     * No instantiate.
     */
    private WebBeansFinder()
    {
        //No action
    }
    /**
     * Gets singleton instance for the current thread's class loader.
     * @param singletonName singleton class name
     * @return singleton instance
     */
    public static Object getSingletonInstance(String singletonName)
    {
        return getSingletonInstance(singletonName, WebBeansUtil.getCurrentClassLoader());
    }
    /**
     * Gets the singleton instance for the given deployment, lazily creating
     * it via the class' no-arg constructor on first request.
     * @param singletonName singleton class name
     * @param classLoader class loader of the deployment
     * @return singleton instance for this deployment
     */
    public static Object getSingletonInstance(String singletonName, ClassLoader classLoader)
    {
        Object object = null;
        synchronized (singletonMap)
        {
            Map<String, Object> managerMap = singletonMap.get(classLoader);
            if (managerMap == null)
            {
                managerMap = new HashMap<String, Object>();
                singletonMap.put(classLoader, managerMap);
            }
            object = managerMap.get(singletonName);
            /* No singleton for this application, create one */
            if (object == null)
            {
                try
                {
                    //Load class
                    Class<?> clazz = classLoader.loadClass(singletonName);
                    //Create instance
                    object = clazz.newInstance();
                    //Save it
                    managerMap.put(singletonName, object);
                }
                catch (InstantiationException e)
                {
                    throw new WebBeansException("Unable to instantiate class : " + singletonName, e);
                }
                catch (IllegalAccessException e)
                {
                    throw new WebBeansException("Illegal access exception in creating instance with class : " + singletonName, e);
                }
                catch (ClassNotFoundException e)
                {
                    throw new WebBeansException("Class not found exception in creating instance with class : " + singletonName, e); }
            }
        }
        return object;
    }
    /**
     * Clear all deployment instances when the application is undeployed.
     * @param classLoader class loader of the deployment
     */
    public static void clearInstances(ClassLoader classLoader)
    {
        Asserts.assertNotNull(classLoader, "classloader is null");
        synchronized (singletonMap)
        {
            singletonMap.remove(classLoader);
        }
    }
}
|
Adding comment
git-svn-id: 6e2e506005f11016269006bf59d22f905406eeba@942222 13f79535-47bb-0310-9956-ffa450edef68
|
webbeans-impl/src/main/java/org/apache/webbeans/config/WebBeansFinder.java
|
Adding comment
|
|
Java
|
apache-2.0
|
f93d7805dd815b639af6016a22956de76b3e12e9
| 0
|
bmatthias/config-builder,TNG/config-builder
|
package com.tngtech.configbuilder.util;
import com.tngtech.configbuilder.annotation.typetransformer.ITypeTransformer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
@RunWith(MockitoJUnitRunner.class)
public class ClassCastingHelperTest {
    /**
     * Fixture whose generic interface arguments (String, Collection&lt;String&gt;)
     * are inspected via reflection in testCastTypeToClass.
     * Static nested: it never touches the enclosing test instance.
     */
    private static class StringToCollectionTransformer implements ITypeTransformer<String, Collection<String>> {
        @Override
        public Collection<String> transform(String argument) {
            return null;
        }
    }
    private ClassCastingHelper classCastingHelper;
    @Before
    public void setUp() {
        classCastingHelper = new ClassCastingHelper();
    }
    /** Primitive types must map to their boxed wrapper classes. */
    @Test
    public void testForCorrectWrappers() {
        // JUnit's assertEquals takes (expected, actual); keeping that order
        // yields correct failure messages.
        assertEquals(Integer.class, classCastingHelper.getWrapperClassForPrimitive(int.class));
        assertEquals(Boolean.class, classCastingHelper.getWrapperClassForPrimitive(boolean.class));
        assertEquals(Double.class, classCastingHelper.getWrapperClassForPrimitive(double.class));
        assertEquals(Byte.class, classCastingHelper.getWrapperClassForPrimitive(byte.class));
    }
    /** Generic type arguments of an implemented interface must cast to plain Class objects. */
    @Test
    public void testCastTypeToClass() {
        Type[] interfaceType = StringToCollectionTransformer.class.getGenericInterfaces();
        Type[] genericTypes = ((ParameterizedType) interfaceType[0]).getActualTypeArguments();
        assertEquals(Class.class, classCastingHelper.castTypeToClass(genericTypes[0]).getClass());
        assertEquals(Class.class, classCastingHelper.castTypeToClass(genericTypes[1]).getClass());
    }
}
|
src/test/java/com/tngtech/configbuilder/util/ClassCastingHelperTest.java
|
package com.tngtech.configbuilder.util;
import com.tngtech.configbuilder.annotation.typetransformer.ITypeTransformer;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.Collection;
import static org.junit.Assert.assertEquals;
@RunWith(MockitoJUnitRunner.class)
public class ClassCastingHelperTest {
    // Fixture whose generic interface arguments (String, Collection<String>)
    // are inspected via reflection in testCastTypeToClass.
    private class testClass implements ITypeTransformer<String, Collection<String>> {
        @Override
        public Collection<String> transform(String argument) {
            return null;
        }
    }
    private ClassCastingHelper classCastingHelper;
    @Before
    public void setUp() {
        classCastingHelper = new ClassCastingHelper();
    }
    // Primitive types must map to their boxed wrapper classes.
    // NOTE(review): JUnit's assertEquals expects (expected, actual); the
    // arguments below are reversed, which only affects failure messages.
    @Test
    public void testForCorrectWrappers() {
        assertEquals(classCastingHelper.getWrapperClassForPrimitive(int.class), Integer.class);
        assertEquals(classCastingHelper.getWrapperClassForPrimitive(boolean.class), Boolean.class);
        assertEquals(classCastingHelper.getWrapperClassForPrimitive(double.class), Double.class);
    }
    // Generic type arguments of an implemented interface must cast to plain Class objects.
    @Test
    public void testCastTypeToClass() {
        Type[] interfaceType = testClass.class.getGenericInterfaces();
        Type[] genericTypes = ((ParameterizedType) interfaceType[0]).getActualTypeArguments();
        assertEquals(classCastingHelper.castTypeToClass(genericTypes[0]).getClass(), Class.class);
        assertEquals(classCastingHelper.castTypeToClass(genericTypes[1]).getClass(), Class.class);
    }
}
|
Unnecessary push for Travis
|
src/test/java/com/tngtech/configbuilder/util/ClassCastingHelperTest.java
|
Unnecessary push for Travis
|
|
Java
|
bsd-3-clause
|
5c3400b57a9a22d6d37fcd61a5f9e6a13f97bb85
| 0
|
interdroid/ibis-ipl,interdroid/ibis-ipl,interdroid/ibis-ipl
|
/* $Id$ */
package ibis.satin.impl;
import ibis.ipl.IbisError;
import ibis.ipl.IbisIdentifier;
public abstract class SpawnSync extends Termination {
private static SpawnCounter spawnCounterCache = null;
/**
* Obtains a new spawn counter. This does not need to be synchronized, only
* one thread spawns.
*
* @return a new spawn counter.
*/
static public SpawnCounter newSpawnCounter() {
if (spawnCounterCache == null) {
return new SpawnCounter();
}
SpawnCounter res = spawnCounterCache;
spawnCounterCache = res.next;
return res;
}
    /**
     * Makes a spawn counter available for recycling. This does not need to be
     * synchronized, only one thread spawns.
     * Counters with a non-zero value are NOT cached, since outstanding jobs
     * may still reference them.
     *
     * @param s
     *            the spawn counter made available.
     */
    static public void deleteSpawnCounter(SpawnCounter s) {
        // A negative value indicates a bookkeeping bug; abort hard when
        // assertions are compiled in. (The "spawncouner" typo is part of the
        // runtime log message and is left untouched here.)
        if (ASSERTS && s.value < 0) {
            spawnLogger.fatal("deleteSpawnCounter: spawncouner < 0, val ="
                    + s.value, new Throwable());
            System.exit(1);
        }
        // Only put it in the cache if its value is 0.
        // If not, there may be references to it yet.
        if (s.value == 0) {
            s.next = spawnCounterCache;
            spawnCounterCache = s;
        }
    }
protected void callSatinFunction(InvocationRecord r) {
InvocationRecord oldParent = null;
int oldParentStamp = 0;
IbisIdentifier oldParentOwner = null;
handleDelayedMessages();
if (ABORTS || FAULT_TOLERANCE) {
oldParent = parent;
oldParentStamp = parentStamp;
oldParentOwner = parentOwner;
}
if (ASSERTS) {
if (r == null) {
spawnLogger.fatal("SATIN '" + ident
+ ": EEK, r = null in callSatinFunc",
new Throwable());
System.exit(1);
}
if (r.aborted) {
spawnLogger.fatal("SATIN '" + ident
+ ": spawning aborted job!",
new Throwable());
System.exit(1);
}
if (r.owner == null) {
spawnLogger.fatal("SATIN '" + ident
+ ": EEK, r.owner = null in callSatinFunc, r = " + r,
new Throwable());
System.exit(1);
}
if (r.owner.equals(ident)) {
if (r.spawnCounter == null) {
spawnLogger.fatal("SATIN '" + ident
+ ": EEK, r.spawnCounter = null in callSatinFunc, "
+ "r = " + r,
new Throwable());
System.exit(1);
}
if (r.spawnCounter.value < 0) {
spawnLogger.fatal("SATIN '" + ident
+ ": spawncounter < 0 in callSatinFunc",
new Throwable());
System.exit(1);
}
if (ABORTS && r.parent == null && parentOwner.equals(ident)
&& r.parentStamp != -1) {
spawnLogger.fatal("SATIN '" + ident
+ ": parent is null for non-root, should not "
+ "happen here! job = " + r,
new Throwable());
System.exit(1);
}
}
}
if ((ABORTS || FAULT_TOLERANCE) && r.parent != null
&& r.parent.aborted) {
if (abortLogger.isDebugEnabled()) {
abortLogger.debug("SATIN '" + ident
+ ": spawning job, parent was aborted! job = " + r
+ ", parent = " + r.parent);
}
if (spawnLogger.isDebugEnabled()) {
r.spawnCounter.decr(r);
} else {
r.spawnCounter.value--;
}
if (ASSERTS) {
if (r.spawnCounter.value < 0) {
spawnLogger.fatal("SATIN '" + ident
+ ": Just made spawncounter < 0",
new Throwable());
System.exit(1);
}
}
return;
}
if (ABORTS || FAULT_TOLERANCE) {
onStack.push(r);
parent = r;
parentStamp = r.stamp;
parentOwner = r.owner;
}
if (spawnLogger.isDebugEnabled()) {
spawnLogger.debug("SATIN '" + ident
+ "': callSatinFunc: stamp = " + r.stamp + ", owner = "
+ (r.owner.equals(ident) ? "me" : r.owner.toString())
+ ", parentStamp = " + r.parentStamp + ", parentOwner = "
+ r.parentOwner);
}
if (r.owner.equals(ident)) {
if (spawnLogger.isDebugEnabled()) {
spawnLogger.debug("SATIN '" + ident
+ "': callSatinFunc: spawn counter = "
+ r.spawnCounter.value);
}
if (ABORTS) {
if (SPAWN_STATS) {
jobsExecuted++;
}
try {
r.runLocal();
} catch (Throwable t) {
// This can only happen if an inlet has thrown an
// exception, or if there was no try-catch block around
// the spawn (i.e. no inlet).
// The semantics of this: all work is aborted,
// and the exception is passed on to the spawner.
// The parent is aborted, it must handle the exception.
r.eek = t;
handleInlet(r);
}
} else { // NO aborts
if (SPAWN_STATS) {
jobsExecuted++;
}
try {
r.runLocal();
} catch (Throwable t) {
throw new IbisError("Unexpected exception in runLocal", t);
}
}
if (spawnLogger.isDebugEnabled()) {
r.spawnCounter.decr(r);
} else {
r.spawnCounter.value--;
}
if (ASSERTS && r.spawnCounter.value < 0) {
spawnLogger.fatal("SATIN '" + ident
+ ": Just made spawncounter < 0",
new Throwable());
System.exit(1);
}
if (ASSERTS && !ABORTS && r.eek != null) {
spawnLogger.fatal("SATIN '" + ident
+ ": Got exception: " + r.eek, r.eek);
System.exit(1);
}
spawnLogger.debug("SATIN '" + ident
+ ": callSatinFunc: stamp = " + r.stamp
+ ", parentStamp = " + r.parentStamp
+ ", parentOwner = " + r.parentOwner
+ " spawn counter = " + r.spawnCounter.value + " DONE");
if (r.eek != null) {
spawnLogger.debug("SATIN '" + ident
+ ": exception was " + r.eek, r.eek);
}
if (FAULT_TOLERANCE && !FT_WITHOUT_ABORTS && !FT_NAIVE) {
//job is finished
attachToParentFinished(r);
}
} else {
if (stealLogger.isDebugEnabled()) {
stealLogger.debug("SATIN '" + ident
+ "': RUNNING REMOTE CODE!");
}
ReturnRecord rr = null;
if (ABORTS) {
if (SPAWN_STATS) {
jobsExecuted++;
}
try {
rr = r.runRemote();
// May be needed if the method did not throw an exception,
// but its child did, and there is an empty inlet.
rr.eek = r.eek;
} catch (Throwable t) {
spawnLogger.fatal("SATIN '" + ident
+ ": OOOhh dear, got exception in runremote: " + t,
t);
System.exit(1);
}
} else {
if (SPAWN_STATS) {
jobsExecuted++;
}
rr = r.runRemote();
}
if (stealLogger.isDebugEnabled()) {
if (r.eek != null) {
stealLogger.debug("SATIN '" + ident
+ "': RUNNING REMOTE CODE GAVE EXCEPTION: " + r.eek,
r.eek);
} else {
stealLogger.debug("SATIN '" + ident
+ "': RUNNING REMOTE CODE DONE!");
}
}
// send wrapper back to the owner
if (!r.aborted) {
sendResult(r, rr);
}
if (stealLogger.isDebugEnabled()) {
stealLogger.debug("SATIN '" + ident
+ "': REMOTE CODE SEND RESULT DONE!");
}
}
if (ABORTS || FAULT_TOLERANCE) {
// restore these, there may be more spawns afterwards...
parentStamp = oldParentStamp;
parentOwner = oldParentOwner;
parent = oldParent;
onStack.pop();
}
if (abortLogger.isDebugEnabled() && r.aborted) {
if (r.eek != null) {
abortLogger.debug("Job on the stack was aborted: " + r.stamp
+ " EEK = " + r.eek, r.eek);
} else {
abortLogger.debug("Job on the stack was aborted: " + r.stamp
+ " EEK = null");
}
}
if (spawnLogger.isDebugEnabled()) {
spawnLogger.debug("SATIN '" + ident
+ "': call satin func done!");
}
}
/**
 * Spawns the method invocation as described by the specified invocation
 * record. The invocation record is added to the job queue maintained by
 * this Satin.
 *
 * <p>Side effects: assigns a (possibly globally unique) stamp to the
 * record, records the current parent linkage, increments the spawn
 * counter, and notifies the load-balancing algorithm.
 *
 * @param r
 *            the invocation record specifying the spawned invocation.
 */
public void spawn(InvocationRecord r) {
    if (ASSERTS) {
        // With master/worker load balancing only the master may spawn.
        if (algorithm instanceof MasterWorker) {
            synchronized (this) {
                if (!ident.equals(masterIdent)) {
                    spawnLogger.fatal("with the master/worker algorithm, "
                        + "work can only be spawned on the master!");
                    System.exit(1);
                }
            }
        }
    }
    // An aborted parent means this whole subtree is dead; drop the spawn.
    if (ABORTS && parent != null && parent.aborted) {
        abortLogger.debug("parent " + parent.stamp
            + " is aborted, spawn ignored");
        return;
    }
    if (SPAWN_STATS) {
        spawns++;
    }
    if (branchingFactor > 0) {
        // globally unique stamps start from 1 (root job); child stamps are
        // derived from the parent's stamp and its spawn ordinal.
        if (parentStamp > 0) {
            r.stamp = branchingFactor * parentStamp + parent.numSpawned++;
        } else {
            // parent is the root
            r.stamp = branchingFactor + rootNumSpawned++;
        }
    } else {
        // No fixed branching factor: fall back to a local counter.
        r.stamp = stampCounter++;
    }
    r.owner = ident;
    // incr(r) presumably records extra bookkeeping for debug logging;
    // the plain field increment is the fast path — TODO confirm.
    if (spawnLogger.isDebugEnabled()) {
        r.spawnCounter.incr(r);
    } else {
        r.spawnCounter.value++;
    }
    if (ABORTS || FAULT_TOLERANCE) {
        // Link the new job to the job currently being executed.
        r.parentStamp = parentStamp;
        r.parentOwner = parentOwner;
        r.parent = parent;
        /*
         * if(parent != null) {
         * for (int i=0; i <parent.parentStamps.size(); i++) {
         * r.parentStamps.add(parent.parentStamps.get(i));
         * r.parentOwners.add(parent.parentOwners.get(i));
         * }
         * }
         *
         * r.parentStamps.add(new Integer(parentStamp));
         * r.parentOwners.add(parentOwner);
         */
    }
    if (FAULT_TOLERANCE && !FT_NAIVE) {
        // Mark jobs that are being re-executed after a crash/restart.
        if (parent != null && parent.reDone
            || parent == null && restarted) {
            r.reDone = true;
        }
    }
    if (FAULT_TOLERANCE && !FT_NAIVE) {
        // A redone job whose result is already in the global result table
        // does not need to be queued again.
        if (r.reDone) {
            if (globalResultTableCheck(r)) {
                return;
            }
        }
    }
    q.addToHead(r);
    algorithm.jobAdded();
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident + "': Spawn, counter = "
            + r.spawnCounter.value + ", stamp = " + r.stamp
            + ", parentStamp = " + r.parentStamp + ", owner = "
            + r.owner + ", parentOwner = " + r.parentOwner);
    }
}
/**
 * Waits for the jobs as specified by the spawncounter given, but meanwhile
 * execute jobs from the end of the jobqueue (or rather, the head of the job
 * queue, where new jobs are added).
 *
 * <p>Returns when the counter drops to zero. A call with an already-zero
 * counter degenerates to a single poll of the network.
 *
 * @param s
 *            the spawncounter.
 */
public void sync(SpawnCounter s) {
    InvocationRecord r;
    if (SPAWN_STATS) {
        syncs++;
    }
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident
            + "': Sync, counter = " + s.value);
    }
    if (s.value == 0) { // sync is poll
        satinPoll();
        handleDelayedMessages();
        if (spawnLogger.isDebugEnabled()) {
            spawnLogger.debug("SATIN '" + ident
                + "': Sync returns");
        }
        return;
    }
    // int numStealAttempts = 0;
    while (s.value > 0) {
        // if(exiting) {
        // System.err.println("EXIT FROM SYNC");
        // exit();
        // }
        satinPoll();
        handleDelayedMessages();
        r = q.getFromHead(); // Try the local queue
        if (r != null) {
            if (spawnLogger.isDebugEnabled()) {
                spawnLogger.debug("SATIN '" + ident
                    + "': Sync, start own job");
            }
            callSatinFunction(r);
            if (spawnLogger.isDebugEnabled()) {
                spawnLogger.debug("SATIN '" + ident
                    + "': Sync, finish own job");
            }
        } else {
            if (FAULT_TOLERANCE && FT_WITHOUT_ABORTS) {
                // before you steal, check if kids need
                // to be restarted
                InvocationRecord curr = null;
                if (parent != null) {
                    curr = parent.toBeRestartedChild;
                    parent.toBeRestartedChild = null;
                } else {
                    curr = rootToBeRestartedChild;
                    rootToBeRestartedChild = null;
                }
                if (curr != null) {
                    // Re-queue each child in the restart list unless its
                    // result is already in the global result table.
                    int i = 0;
                    while (curr != null) {
                        // is it really necessary??
                        if (!globalResultTableCheck(curr)) {
                            q.addToTail(curr);
                        }
                        InvocationRecord tmp = curr;
                        curr = curr.toBeRestartedSibling;
                        tmp.toBeRestartedSibling = null;
                        i++;
                    }
                } else {
                    // Nothing to restart: try to steal work instead.
                    r = algorithm.clientIteration();
                    if (r != null) {
                        if (spawnLogger.isDebugEnabled()) {
                            spawnLogger.debug("SATIN '" + ident
                                + "': Sync, start stolen job");
                        }
                        callSatinFunction(r);
                        if (spawnLogger.isDebugEnabled()) {
                            spawnLogger.debug("SATIN '" + ident
                                + "': Sync, finish stolen job");
                        }
                    }
                }
            } else {
                // No FT restart handling: go straight to work stealing.
                r = algorithm.clientIteration();
                if (r != null) {
                    if (spawnLogger.isDebugEnabled()) {
                        spawnLogger.debug("SATIN '" + ident
                            + "': Sync, start stolen job");
                    }
                    callSatinFunction(r);
                    if (spawnLogger.isDebugEnabled()) {
                        spawnLogger.debug("SATIN '" + ident
                            + "': Sync, finish stolen job");
                    }
                }
            }
        }
    }
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident
            + "': Sync returns");
    }
}
/**
 * Implements the main client loop: steal jobs and execute them.
 *
 * <p>Loops until {@code exiting} is set; a node that (re)gains master
 * status leaves the loop immediately (fault-tolerance path).
 */
public void client() {
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident + "': starting client!");
    }
    while (!exiting) {
        // steal and run jobs
        satinPoll();
        handleDelayedMessages();
        InvocationRecord r = algorithm.clientIteration();
        if (r != null) {
            if (spawnLogger.isDebugEnabled()) {
                spawnLogger.debug("SATIN '" + ident
                    + "': client, start stolen job");
            }
            callSatinFunction(r);
            if (spawnLogger.isDebugEnabled()) {
                spawnLogger.debug("SATIN '" + ident
                    + "': client, finish stolen job");
            }
        }
        // for ft: if this node became the master, stop acting as a client.
        if (master) {
            return;
        }
    }
}
}
|
src/ibis/satin/impl/SpawnSync.java
|
/* $Id$ */
package ibis.satin.impl;
import ibis.ipl.IbisError;
import ibis.ipl.IbisIdentifier;
public abstract class SpawnSync extends Termination {
private static SpawnCounter spawnCounterCache = null;
/**
 * Obtains a spawn counter, preferring one from the recycling cache over a
 * fresh allocation. This does not need to be synchronized: only a single
 * thread performs spawns.
 *
 * @return a new or recycled spawn counter.
 */
static public SpawnCounter newSpawnCounter() {
    SpawnCounter recycled = spawnCounterCache;
    if (recycled == null) {
        return new SpawnCounter();
    }
    // Pop the head of the free list.
    spawnCounterCache = recycled.next;
    return recycled;
}
/**
 * Makes a spawn counter available for recycling. This does not need to be
 * synchronized, only one thread spawns.
 *
 * <p>A counter is only cached when its value is exactly zero; a non-zero
 * counter may still be referenced by outstanding jobs. A negative value is
 * a fatal invariant violation (when ASSERTS is enabled).
 *
 * @param s
 *            the spawn counter made available.
 */
static public void deleteSpawnCounter(SpawnCounter s) {
    if (ASSERTS && s.value < 0) {
        // Fixed typo in the fatal message ("spawncouner" -> "spawncounter").
        spawnLogger.fatal("deleteSpawnCounter: spawncounter < 0, val ="
            + s.value, new Throwable());
        System.exit(1);
    }
    // Only put it in the cache if its value is 0.
    // If not, there may be references to it yet.
    if (s.value == 0) {
        s.next = spawnCounterCache;
        spawnCounterCache = s;
    }
}
/**
 * Executes the given invocation record, either locally (the job is owned
 * by this node) or on behalf of a remote owner (a stolen job, whose result
 * is sent back).
 *
 * <p>When aborts or fault tolerance are enabled, the current parent
 * linkage (parent/parentStamp/parentOwner and the on-stack set) is saved
 * on entry and restored on exit, so nested spawns see the right parent.
 *
 * @param r the invocation record to execute; must not be null.
 */
protected void callSatinFunction(InvocationRecord r) {
    InvocationRecord oldParent = null;
    int oldParentStamp = 0;
    IbisIdentifier oldParentOwner = null;
    handleDelayedMessages();
    if (ABORTS || FAULT_TOLERANCE) {
        // Save parent linkage; restored before returning.
        oldParent = parent;
        oldParentStamp = parentStamp;
        oldParentOwner = parentOwner;
    }
    if (ASSERTS) {
        // Invariant checks; each violation is fatal.
        if (r == null) {
            spawnLogger.fatal("SATIN '" + ident
                + ": EEK, r = null in callSatinFunc",
                new Throwable());
            System.exit(1);
        }
        if (r.aborted) {
            spawnLogger.fatal("SATIN '" + ident
                + ": spawning aborted job!",
                new Throwable());
            System.exit(1);
        }
        if (r.owner == null) {
            spawnLogger.fatal("SATIN '" + ident
                + ": EEK, r.owner = null in callSatinFunc, r = " + r,
                new Throwable());
            System.exit(1);
        }
        if (r.owner.equals(ident)) {
            if (r.spawnCounter == null) {
                spawnLogger.fatal("SATIN '" + ident
                    + ": EEK, r.spawnCounter = null in callSatinFunc, "
                    + "r = " + r,
                    new Throwable());
                System.exit(1);
            }
            if (r.spawnCounter.value < 0) {
                spawnLogger.fatal("SATIN '" + ident
                    + ": spawncounter < 0 in callSatinFunc",
                    new Throwable());
                System.exit(1);
            }
            // parentStamp == -1 presumably marks the root job — TODO confirm.
            if (ABORTS && r.parent == null && parentOwner.equals(ident)
                && r.parentStamp != -1) {
                spawnLogger.fatal("SATIN '" + ident
                    + ": parent is null for non-root, should not "
                    + "happen here! job = " + r,
                    new Throwable());
                System.exit(1);
            }
        }
    }
    // If the job's parent was aborted, the job is dead: just release its
    // spawn counter and return without running it.
    if ((ABORTS || FAULT_TOLERANCE) && r.parent != null
        && r.parent.aborted) {
        if (abortLogger.isDebugEnabled()) {
            abortLogger.debug("SATIN '" + ident
                + ": spawning job, parent was aborted! job = " + r
                + ", parent = " + r.parent);
        }
        if (spawnLogger.isDebugEnabled()) {
            r.spawnCounter.decr(r);
        } else {
            r.spawnCounter.value--;
        }
        if (ASSERTS) {
            if (r.spawnCounter.value < 0) {
                spawnLogger.fatal("SATIN '" + ident
                    + ": Just made spawncounter < 0",
                    new Throwable());
                System.exit(1);
            }
        }
        return;
    }
    if (ABORTS || FAULT_TOLERANCE) {
        // This job becomes the parent of anything it spawns.
        onStack.push(r);
        parent = r;
        parentStamp = r.stamp;
        parentOwner = r.owner;
    }
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident
            + "': callSatinFunc: stamp = " + r.stamp + ", owner = "
            + (r.owner.equals(ident) ? "me" : r.owner.toString())
            + ", parentStamp = " + r.parentStamp + ", parentOwner = "
            + r.parentOwner);
    }
    if (r.owner.equals(ident)) {
        // Local job: run it and decrement the owner's spawn counter.
        if (spawnLogger.isDebugEnabled()) {
            spawnLogger.debug("SATIN '" + ident
                + "': callSatinFunc: spawn counter = "
                + r.spawnCounter.value);
        }
        if (ABORTS) {
            if (SPAWN_STATS) {
                jobsExecuted++;
            }
            try {
                r.runLocal();
            } catch (Throwable t) {
                // This can only happen if an inlet has thrown an
                // exception, or if there was no try-catch block around
                // the spawn (i.e. no inlet).
                // The semantics of this: all work is aborted,
                // and the exception is passed on to the spawner.
                // The parent is aborted, it must handle the exception.
                r.eek = t;
                handleInlet(r);
            }
        } else { // NO aborts
            if (SPAWN_STATS) {
                jobsExecuted++;
            }
            try {
                r.runLocal();
            } catch (Throwable t) {
                // Without aborts no user exception can escape a spawn.
                throw new IbisError("Unexpected exception in runLocal", t);
            }
        }
        if (spawnLogger.isDebugEnabled()) {
            r.spawnCounter.decr(r);
        } else {
            r.spawnCounter.value--;
        }
        if (ASSERTS && r.spawnCounter.value < 0) {
            spawnLogger.fatal("SATIN '" + ident
                + ": Just made spawncounter < 0",
                new Throwable());
            System.exit(1);
        }
        if (ASSERTS && !ABORTS && r.eek != null) {
            spawnLogger.fatal("SATIN '" + ident
                + ": Got exception: " + r.eek, r.eek);
            System.exit(1);
        }
        // NOTE(review): this debug call is not guarded by isDebugEnabled(),
        // unlike the others — possibly intentional, possibly an oversight.
        spawnLogger.debug("SATIN '" + ident
            + ": callSatinFunc: stamp = " + r.stamp
            + ", parentStamp = " + r.parentStamp
            + ", parentOwner = " + r.parentOwner
            + " spawn counter = " + r.spawnCounter.value + " DONE");
        if (r.eek != null) {
            spawnLogger.debug("SATIN '" + ident
                + ": exception was " + r.eek, r.eek);
        }
        if (FAULT_TOLERANCE && !FT_WITHOUT_ABORTS && !FT_NAIVE) {
            // job is finished
            attachToParentFinished(r);
        }
    } else {
        // Stolen job: run it and send the result back to the owner.
        if (stealLogger.isDebugEnabled()) {
            stealLogger.debug("SATIN '" + ident
                + "': RUNNING REMOTE CODE!");
        }
        ReturnRecord rr = null;
        if (ABORTS) {
            if (SPAWN_STATS) {
                jobsExecuted++;
            }
            try {
                rr = r.runRemote();
                // May be needed if the method did not throw an exception,
                // but its child did, and there is an empty inlet.
                rr.eek = r.eek;
            } catch (Throwable t) {
                spawnLogger.fatal("SATIN '" + ident
                    + ": OOOhh dear, got exception in runremote: " + t,
                    t);
                System.exit(1);
            }
        } else {
            if (SPAWN_STATS) {
                jobsExecuted++;
            }
            rr = r.runRemote();
        }
        if (stealLogger.isDebugEnabled()) {
            if (r.eek != null) {
                stealLogger.debug("SATIN '" + ident
                    + "': RUNNING REMOTE CODE GAVE EXCEPTION: " + r.eek,
                    r.eek);
            } else {
                stealLogger.debug("SATIN '" + ident
                    + "': RUNNING REMOTE CODE DONE!");
            }
        }
        // send wrapper back to the owner
        if (!r.aborted) {
            sendResult(r, rr);
        }
        if (stealLogger.isDebugEnabled()) {
            stealLogger.debug("SATIN '" + ident
                + "': REMOTE CODE SEND RESULT DONE!");
        }
    }
    if (ABORTS || FAULT_TOLERANCE) {
        // restore these, there may be more spawns afterwards...
        parentStamp = oldParentStamp;
        parentOwner = oldParentOwner;
        parent = oldParent;
        onStack.pop();
    }
    if (abortLogger.isDebugEnabled() && r.aborted) {
        if (r.eek != null) {
            abortLogger.debug("Job on the stack was aborted: " + r.stamp
                + " EEK = " + r.eek, r.eek);
        } else {
            abortLogger.debug("Job on the stack was aborted: " + r.stamp
                + " EEK = null");
        }
    }
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident
            + "': call satin func done!");
    }
}
/**
 * Spawns the method invocation as described by the specified invocation
 * record. The invocation record is added to the job queue maintained by
 * this Satin.
 *
 * <p>Side effects: assigns a stamp, records the current parent linkage,
 * increments the spawn counter, and notifies the load-balancing algorithm.
 *
 * @param r
 *            the invocation record specifying the spawned invocation.
 */
public void spawn(InvocationRecord r) {
    if (ASSERTS) {
        // With master/worker load balancing only the master may spawn.
        if (algorithm instanceof MasterWorker) {
            synchronized (this) {
                if (!ident.equals(masterIdent)) {
                    spawnLogger.fatal("with the master/worker algorithm, "
                        + "work can only be spawned on the master!");
                    System.exit(1);
                }
            }
        }
    }
    // An aborted parent means this whole subtree is dead; drop the spawn.
    if (ABORTS && parent != null && parent.aborted) {
        abortLogger.debug("parent " + parent.stamp
            + " is aborted, spawn ignored");
        return;
    }
    if (SPAWN_STATS) {
        spawns++;
    }
    if (branchingFactor > 0) {
        // globally unique stamps start from 1 (root job); child stamps are
        // derived from the parent's stamp and its spawn ordinal.
        if (parentStamp > 0) {
            r.stamp = branchingFactor * parentStamp + parent.numSpawned++;
        } else {
            // parent is the root
            r.stamp = branchingFactor + rootNumSpawned++;
        }
    } else {
        // No fixed branching factor: fall back to a local counter.
        r.stamp = stampCounter++;
    }
    r.owner = ident;
    // incr(r) presumably records extra bookkeeping for debug logging;
    // the plain field increment is the fast path — TODO confirm.
    if (spawnLogger.isDebugEnabled()) {
        r.spawnCounter.incr(r);
    } else {
        r.spawnCounter.value++;
    }
    if (ABORTS || FAULT_TOLERANCE) {
        // Link the new job to the job currently being executed.
        r.parentStamp = parentStamp;
        r.parentOwner = parentOwner;
        r.parent = parent;
        /*
         * if(parent != null) {
         * for (int i=0; i <parent.parentStamps.size(); i++) {
         * r.parentStamps.add(parent.parentStamps.get(i));
         * r.parentOwners.add(parent.parentOwners.get(i));
         * }
         * }
         *
         * r.parentStamps.add(new Integer(parentStamp));
         * r.parentOwners.add(parentOwner);
         */
    }
    if (FAULT_TOLERANCE && !FT_NAIVE) {
        // Mark jobs that are being re-executed after a crash/restart.
        if (parent != null && parent.reDone
            || parent == null && restarted) {
            r.reDone = true;
        }
    }
    if (FAULT_TOLERANCE && !FT_NAIVE) {
        // A redone job whose result is already in the global result table
        // does not need to be queued again.
        if (r.reDone) {
            if (globalResultTableCheck(r)) {
                return;
            }
        }
    }
    q.addToHead(r);
    algorithm.jobAdded();
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident + "': Spawn, counter = "
            + r.spawnCounter.value + ", stamp = " + r.stamp
            + ", parentStamp = " + r.parentStamp + ", owner = "
            + r.owner + ", parentOwner = " + r.parentOwner);
    }
}
/**
 * Waits for the jobs as specified by the spawncounter given, but meanwhile
 * execute jobs from the end of the jobqueue (or rather, the head of the job
 * queue, where new jobs are added).
 *
 * <p>Returns when the counter drops to zero. A call with an already-zero
 * counter degenerates to a single poll of the network.
 *
 * @param s
 *            the spawncounter.
 */
public void sync(SpawnCounter s) {
    InvocationRecord r;
    if (SPAWN_STATS) {
        syncs++;
    }
    if (s.value == 0) { // sync is poll
        satinPoll();
        handleDelayedMessages();
        return;
    }
    // int numStealAttempts = 0;
    while (s.value > 0) {
        // if(exiting) {
        // System.err.println("EXIT FROM SYNC");
        // exit();
        // }
        if (spawnLogger.isDebugEnabled()) {
            spawnLogger.debug("SATIN '" + ident
                + "': Sync, counter = " + s.value);
        }
        satinPoll();
        handleDelayedMessages();
        r = q.getFromHead(); // Try the local queue
        if (r != null) {
            callSatinFunction(r);
        } else {
            if (FAULT_TOLERANCE && FT_WITHOUT_ABORTS) {
                // before you steal, check if kids need
                // to be restarted
                InvocationRecord curr = null;
                if (parent != null) {
                    curr = parent.toBeRestartedChild;
                    parent.toBeRestartedChild = null;
                } else {
                    curr = rootToBeRestartedChild;
                    rootToBeRestartedChild = null;
                }
                if (curr != null) {
                    // Re-queue each child in the restart list unless its
                    // result is already in the global result table.
                    int i = 0;
                    while (curr != null) {
                        // is it really necessary??
                        if (!globalResultTableCheck(curr)) {
                            q.addToTail(curr);
                        }
                        InvocationRecord tmp = curr;
                        curr = curr.toBeRestartedSibling;
                        tmp.toBeRestartedSibling = null;
                        i++;
                    }
                } else {
                    // Nothing to restart: try to steal work instead.
                    r = algorithm.clientIteration();
                    if (r != null) {
                        callSatinFunction(r);
                    }
                }
            } else {
                // No FT restart handling: go straight to work stealing.
                r = algorithm.clientIteration();
                if (r != null) {
                    callSatinFunction(r);
                }
            }
        }
    }
}
/**
 * Implements the main client loop: steal jobs and execute them.
 *
 * <p>Loops until {@code exiting} is set; a node that (re)gains master
 * status leaves the loop immediately (fault-tolerance path).
 */
public void client() {
    if (spawnLogger.isDebugEnabled()) {
        spawnLogger.debug("SATIN '" + ident + "': starting client!");
    }
    while (!exiting) {
        // steal and run jobs
        satinPoll();
        handleDelayedMessages();
        InvocationRecord r = algorithm.clientIteration();
        if (r != null) {
            callSatinFunction(r);
        }
        // for ft: if this node became the master, stop acting as a client.
        if (master) {
            return;
        }
    }
}
}
|
Some more logging
git-svn-id: f22e84ca493ccad7df8d2727bca69d1c9fc2e5c5@3153 aaf88347-d911-0410-b711-e54d386773bb
|
src/ibis/satin/impl/SpawnSync.java
|
Some more logging
|
|
Java
|
bsd-3-clause
|
da7548919703f238be511f080ce0fb508fae6552
| 0
|
NCIP/cadsr-cdecurate,NCIP/cadsr-cdecurate,NCIP/cadsr-cdecurate,NCIP/cadsr-cdecurate,NCIP/cadsr-cdecurate,NCIP/cadsr-cdecurate
|
package gov.nih.nci.cadsr.cdecurate.tool;
import java.util.HashMap;
import java.util.Vector;
import gov.nih.nci.cadsr.cdecurate.ui.AltNamesDefsSession;
import gov.nih.nci.cadsr.cdecurate.util.DataManager;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class ValueDomainServlet extends CurationServlet {
/** No-arg constructor — presumably required by the servlet container. */
public ValueDomainServlet() {
}

/** Wraps the current request/response pair and servlet context. */
public ValueDomainServlet(HttpServletRequest req, HttpServletResponse res,
    ServletContext sc) {
    super(req, res, sc);
}
/**
 * Dispatches the incoming request to the value-domain handler that matches
 * the given request type. Unrecognised types are silently ignored, exactly
 * as in the original switch.
 *
 * @param reqType the kind of value-domain action requested.
 * @throws Exception propagated from the individual handlers.
 */
public void execute(ACRequestTypes reqType) throws Exception {
    if (reqType == ACRequestTypes.newVDFromMenu) {
        doOpenCreateNewPages();
    } else if (reqType == ACRequestTypes.newVDfromForm) {
        doCreateVDActions();
    } else if (reqType == ACRequestTypes.editVD) {
        doEditVDActions();
    } else if (reqType == ACRequestTypes.createNewVD) {
        doOpenCreateVDPage();
    } else if (reqType == ACRequestTypes.validateVDFromForm) {
        doInsertVD();
    } else if (reqType == ACRequestTypes.viewVALUEDOMAIN) {
        doOpenViewPage();
    } else if (reqType == ACRequestTypes.viewVDPVSTab) {
        doViewPageTab();
    }
}
/**
 * The doOpenCreateNewPages method will set some session attributes then forward the request to a Create page.
 * Called from 'service' method where reqType is 'newDEFromMenu', 'newDECFromMenu', 'newVDFromMenu'. Sets some
 * initial session attributes. Calls 'getAC.getACList' to get the Data list from the database for the selected
 * context. Sets session Bean and forwards the create page for the selected component.
 * @throws Exception
 */
private void doOpenCreateNewPages() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // Start from a clean session: drop leftovers from previous workflows.
    clearSessionAttributes(m_classReq, m_classRes);
    this.clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
    String context = (String) session.getAttribute("sDefaultContext"); // from Login.jsp
    DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, "nothing");
    DataManager.setAttribute(session, "DDEAction", "nothing"); // reset from "CreateNewDEFComp"
    DataManager.setAttribute(session, "sCDEAction", "nothing");
    DataManager.setAttribute(session, "VDPageAction", "nothing");
    DataManager.setAttribute(session, "DECPageAction", "nothing");
    DataManager.setAttribute(session, "sDefaultContext", context);
    this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
    DataManager.setAttribute(session, "originAction", "NewVDFromMenu");
    DataManager.setAttribute(session, "LastMenuButtonPressed", "CreateVD");
    // Fresh VD bean (plus a copy for change tracking), both in the
    // "DRAFT NEW" workflow state with a system-generated preferred name.
    VD_Bean m_VD = new VD_Bean();
    m_VD.setVD_ASL_NAME("DRAFT NEW");
    m_VD.setAC_PREF_NAME_TYPE("SYS");
    DataManager.setAttribute(session, "m_VD", m_VD);
    VD_Bean oldVD = new VD_Bean();
    oldVD.setVD_ASL_NAME("DRAFT NEW");
    oldVD.setAC_PREF_NAME_TYPE("SYS");
    DataManager.setAttribute(session, "oldVDBean", oldVD);
    // Empty EVS beans for the concept slots used by the create page:
    // object class, property, representation, and their qualifiers.
    EVS_Bean m_OC = new EVS_Bean();
    DataManager.setAttribute(session, "m_OC", m_OC);
    EVS_Bean m_PC = new EVS_Bean();
    DataManager.setAttribute(session, "m_PC", m_PC);
    EVS_Bean m_REP = new EVS_Bean();
    DataManager.setAttribute(session, "m_REP", m_REP);
    EVS_Bean m_OCQ = new EVS_Bean();
    DataManager.setAttribute(session, "m_OCQ", m_OCQ);
    EVS_Bean m_PCQ = new EVS_Bean();
    DataManager.setAttribute(session, "m_PCQ", m_PCQ);
    EVS_Bean m_REPQ = new EVS_Bean();
    DataManager.setAttribute(session, "m_REPQ", m_REPQ);
    ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
} // end of doOpenCreateNewPages
/**
 * The doCreateVDActions method handles CreateVD actions of the request. Called from 'service' method
 * where reqType is 'newVDfromForm'. Dispatches on the 'pageAction' request parameter: validation, submit,
 * permissible-value editing, building-block selection, sorting, contacts, tab switching, and navigation.
 *
 * @throws Exception
 */
private void doCreateVDActions() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sMenuAction = (String) m_classReq.getParameter("MenuAction");
    if (sMenuAction != null)
        DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, sMenuAction);
    String sAction = (String) m_classReq.getParameter("pageAction");
    if (sAction == null) sAction = "";
    DataManager.setAttribute(session, "VDPageAction", sAction); // store the page action in attribute
    String sSubAction = (String) m_classReq.getParameter("VDAction");
    DataManager.setAttribute(session, "VDAction", sSubAction);
    String sOriginAction = (String) session.getAttribute("originAction");
    // System.out.println("create vd " + sAction);
    /* if (sAction.equals("changeContext"))
    doChangeContext(req, res, "vd");
    else */if (sAction.equals("validate"))
    {
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
    }
    else if (sAction.equals("submit"))
        doSubmitVD();
    else if (sAction.equals("createPV") || sAction.equals("editPV") || sAction.equals("removePV"))
        doOpenCreatePVPage(m_classReq, m_classRes, sAction, "createVD");
    else if (sAction.equals("removePVandParent") || sAction.equals("removeParent"))
        doRemoveParent(sAction, "createVD");
    // else if (sAction.equals("searchPV"))
    // doSearchPV(m_classReq, m_classRes);
    else if (sAction.equals("createVM"))
        doOpenCreateVMPage(m_classReq, m_classRes, "vd");
    else if (sAction.equals("Enum") || sAction.equals("NonEnum"))
    {
        // Toggle between enumerated and non-enumerated VD layout.
        doSetVDPage("Create");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    else if (sAction.equals("clearBoxes"))
    {
        String ret = clearEditsOnPage(sOriginAction, sMenuAction); // , "vdEdits");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    /*
     * else if (sAction.equals("refreshCreateVD")) { doSelectParentVD(req, res); ForwardJSP(req, res,
     * "/CreateVDPage.jsp"); return; }
     */else if (sAction.equals("UseSelection"))
    {
        // Template/version workflows append to the existing name rather
        // than starting a new one.
        String nameAction = "newName";
        if (sMenuAction.equals("NewVDTemplate") || sMenuAction.equals("NewVDVersion"))
            nameAction = "appendName";
        doVDUseSelection(nameAction);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("RemoveSelection"))
    {
        doRemoveBuildingBlocksVD();
        // re work on the naming if new one
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        EVS_Bean nullEVS = null;
        if (!sMenuAction.equals("NewVDTemplate") && !sMenuAction.equals("NewVDVersion"))
            vd = (VD_Bean) this.getACNames(nullEVS, "Search", vd); // change only abbr pref name
        else
            vd = (VD_Bean) this.getACNames(nullEVS, "Remove", vd); // need to change the long name & def also
        DataManager.setAttribute(session, "m_VD", vd);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("changeNameType"))
    {
        this.doChangeVDNameType();
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    /*
     * else if (sAction.equals("CreateNonEVSRef")) { doNonEVSReference(req, res); ForwardJSP(req, res,
     * "/CreateVDPage.jsp"); }
     */else if (sAction.equals("addSelectedCon"))
    {
        doSelectVMConcept(m_classReq, m_classRes, sAction);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    else if (sAction.equals("sortPV"))
    {
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        String sField = (String) m_classReq.getParameter("pvSortColumn");
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        serAC.getVDPVSortedRows(vd, sField, "create", ""); // call the method to sort pv attribute
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("Store Alternate Names") || sAction.equals("Store Reference Documents"))
        this.doMarkACBeanForAltRef(m_classReq, m_classRes, "ValueDomain", sAction, "createAC");
    // add/edit or remove contacts
    else if (sAction.equals("doContactUpd") || sAction.equals("removeContact"))
    {
        VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
        // capture all page attributes
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, VDBean);
        VDBean.setAC_CONTACTS(this.doContactACUpdates(m_classReq, sAction));
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    // open the DE page or search page with
    else if (sAction.equals("goBack"))
    {
        String sFor = goBackfromVD(sOriginAction, sMenuAction, "", "", "create");
        ForwardJSP(m_classReq, m_classRes, sFor);
    }
    else if (sAction.equals("vdpvstab"))
    {
        // Switch to the permissible-values tab; validate first.
        DataManager.setAttribute(session, "TabFocus", "PV");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
    }
    else if (sAction.equals("vddetailstab"))
    {
        DataManager.setAttribute(session, "TabFocus", "VD");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
}
/**
 * The doEditVDActions method handles EditVD actions of the request. Called from 'service' method where reqType is
 * 'editVD'. Dispatches on the 'pageAction' request parameter: validation (including block edit), submit,
 * suggestions, building-block selection, permissible-value editing, contacts, tab switching, and navigation.
 *
 * @throws Exception
 */
private void doEditVDActions() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sMenuAction = (String) m_classReq.getParameter("MenuAction");
    if (sMenuAction != null)
        DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, sMenuAction);
    String sAction = (String) m_classReq.getParameter("pageAction");
    if (sAction == null) sAction = "";
    DataManager.setAttribute(session, "VDPageAction", sAction); // store the page action in attribute
    String sSubAction = (String) m_classReq.getParameter("VDAction");
    DataManager.setAttribute(session, "VDAction", sSubAction);
    String sButtonPressed = (String) session.getAttribute("LastMenuButtonPressed");
    String sSearchAC = (String) session.getAttribute("SearchAC");
    if (sSearchAC == null)
        sSearchAC = "";
    String sOriginAction = (String) session.getAttribute("originAction");
    if (sAction.equals("submit"))
        doSubmitVD();
    else if (sAction.equals("validate") && sOriginAction.equals("BlockEditVD"))
        doValidateVDBlockEdit();
    else if (sAction.equals("validate"))
    {
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
    }
    else if (sAction.equals("suggestion"))
        doSuggestionDE(m_classReq, m_classRes);
    else if (sAction.equals("UseSelection"))
    {
        // Block edit uses its own naming strategy.
        String nameAction = "appendName";
        if (sOriginAction.equals("BlockEditVD"))
            nameAction = "blockName";
        doVDUseSelection(nameAction);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("RemoveSelection"))
    {
        doRemoveBuildingBlocksVD();
        // re work on the naming if new one
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        EVS_Bean nullEVS = null;
        vd = (VD_Bean) this.getACNames(nullEVS, "Remove", vd); // change only abbr pref name
        DataManager.setAttribute(session, "m_VD", vd);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("changeNameType"))
    {
        this.doChangeVDNameType();
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("sortPV"))
    {
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        String sField = (String) m_classReq.getParameter("pvSortColumn");
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        serAC.getVDPVSortedRows(vd, sField, "edit", ""); // call the method to sort pv attribute
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("createPV") || sAction.equals("editPV") || sAction.equals("removePV"))
        doOpenCreatePVPage(m_classReq, m_classRes, sAction, "editVD");
    else if (sAction.equals("removePVandParent") || sAction.equals("removeParent"))
        doRemoveParent(sAction, "editVD");
    else if (sAction.equals("addSelectedCon"))
    {
        doSelectVMConcept(m_classReq, m_classRes, sAction);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("Enum") || sAction.equals("NonEnum"))
    {
        // Toggle between enumerated and non-enumerated VD layout.
        doSetVDPage("Edit");
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("Store Alternate Names") || sAction.equals("Store Reference Documents"))
        this.doMarkACBeanForAltRef(m_classReq, m_classRes, "ValueDomain", sAction, "editAC");
    // add/edit or remove contacts
    else if (sAction.equals("doContactUpd") || sAction.equals("removeContact"))
    {
        VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
        // capture all page attributes
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, VDBean);
        VDBean.setAC_CONTACTS(this.doContactACUpdates(m_classReq, sAction));
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("clearBoxes"))
    {
        String ret = clearEditsOnPage(sOriginAction, sMenuAction); // , "vdEdits");
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    // open the Edit DE page or search page with
    else if (sAction.equals("goBack"))
    {
        String sFor = goBackfromVD(sOriginAction, sMenuAction, sSearchAC, sButtonPressed, "edit");
        ForwardJSP(m_classReq, m_classRes, sFor);
    }
    else if (sAction.equals("vdpvstab"))
    {
        // Switch to the permissible-values tab; validate first.
        DataManager.setAttribute(session, "TabFocus", "PV");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
    }
    else if (sAction.equals("vddetailstab"))
    {
        DataManager.setAttribute(session, "TabFocus", "VD");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
}
/**
 * Changes the VD preferred-name type (SYS / ABBR / USER) as selected on the
 * page, preserving a hand-typed preferred name as the user name and setting
 * the preferred name to the variant matching the chosen type.
 *
 * @throws java.lang.Exception
 */
private void doChangeVDNameType() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // get the selected type from the page
    VD_Bean pageVD = (VD_Bean) session.getAttribute("m_VD");
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, pageVD); // capture all other attributes
    String sSysName = pageVD.getAC_SYS_PREF_NAME();
    String sAbbName = pageVD.getAC_ABBR_PREF_NAME();
    String sUsrName = pageVD.getAC_USER_PREF_NAME();
    String sNameType = (String) m_classReq.getParameter("rNameConv");
    if (sNameType == null || sNameType.equals(""))
        sNameType = "SYS"; // default
    // get the existing preferred name to make sure earlier typed one is saved in the user
    String sPrefName = (String) m_classReq.getParameter("txtPreferredName");
    // A name that matches neither the system nor the abbreviated variant
    // (and is not the placeholder) must have been typed by the user.
    if (sPrefName != null && !sPrefName.equals("") && !sPrefName.equals("(Generated by the System)")
        && !sPrefName.equals(sSysName) && !sPrefName.equals(sAbbName))
        pageVD.setAC_USER_PREF_NAME(sPrefName); // store typed one in de bean
    // reset system generated or abbr according to the selected type
    if (sNameType.equals("SYS"))
    {
        if (sSysName == null)
            sSysName = "";
        // limit to 30 characters — keeps the LAST 30 characters
        if (sSysName.length() > 30)
            sSysName = sSysName.substring(sSysName.length() - 30);
        pageVD.setVD_PREFERRED_NAME(sSysName);
    }
    else if (sNameType.equals("ABBR"))
        pageVD.setVD_PREFERRED_NAME(sAbbName);
    else if (sNameType.equals("USER"))
        pageVD.setVD_PREFERRED_NAME(sUsrName);
    pageVD.setAC_PREF_NAME_TYPE(sNameType); // store the type in the bean
    // logger.debug(pageVD.getAC_PREF_NAME_TYPE() + " pref " + pageVD.getVD_PREFERRED_NAME());
    DataManager.setAttribute(session, "m_VD", pageVD);
}
/**
* Does open editVD page action from DE page called from 'doEditDEActions' method. Calls
* 'm_setAC.setDEValueFromPage' to store the DE bean for later use Using the VD idseq, calls 'SerAC.search_VD'
* method to gets dec attributes to populate. stores VD bean in session and opens editVD page. goes back to editDE
* page if any error.
*
* @throws Exception
*/
public void doOpenEditVDPage() throws Exception
{
HttpSession session = m_classReq.getSession();
// capture the current DE page state first, since the VD is opened from a DE
DE_Bean m_DE = (DE_Bean) session.getAttribute("m_DE");
if (m_DE == null)
m_DE = new DE_Bean();
// store the de values in the session
m_setAC.setDEValueFromPage(m_classReq, m_classRes, m_DE);
DataManager.setAttribute(session, "m_DE", m_DE);
this.clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
// VD id selected on the DE page; only the first selection is used
String sVDID = null;
String sVDid[] = m_classReq.getParameterValues("selVD");
if (sVDid != null)
sVDID = sVDid[0];
// get the vd bean for this id
if (sVDID != null)
{
Vector vList = new Vector();
GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
// search by VD idseq only; every other criterion is left blank
serAC.doVDSearch(sVDID, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "", "",
"", "", vList, "0");
// forward editVD page with this bean
if (vList.size() > 0)
{
// NOTE(review): the unconditional break at the end of this loop means
// only the first result is ever processed
for (int i = 0; i < vList.size(); i++)
{
VD_Bean VDBean = new VD_Bean();
VDBean = (VD_Bean) vList.elementAt(i);
// check if the user has write permission
String contID = VDBean.getVD_CONTE_IDSEQ();
String sUser = (String) session.getAttribute("Username");
GetACService getAC = new GetACService(m_classReq, m_classRes, this);
String hasPermit = getAC.hasPrivilege("Create", sUser, "vd", contID);
// forward to editVD if has write permission
if (hasPermit.equals("Yes"))
{
String sMenuAction = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
VDBean = serAC.getVDAttributes(VDBean, "Edit", sMenuAction); // get VD other Attributes
DataManager.setAttribute(session, "m_VD", VDBean);
// keep a clone as "oldVDBean" so later edits can be diffed/reverted
VD_Bean oldVD = new VD_Bean();
oldVD = oldVD.cloneVD_Bean(VDBean);
DataManager.setAttribute(session, "oldVDBean", oldVD);
// DataManager.setAttribute(session, "oldVDBean", VDBean);
ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp"); // forward to editVD page
}
// go back to editDE with message if no permission
else
{
DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, "No edit permission in "
+ VDBean.getVD_CONTEXT_NAME() + " context");
ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
}
break;
}
}
// display error message and back to edit DE page
else
{
DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE,
"Unable to get Existing VD attributes from the database");
ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
}
}
// display error message and back to editDE page
else
{
DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, "Unable to get the VDid from the page");
ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
}
}// end doEditDECAction
/**
* Called from doCreateVDActions. Calls 'setAC.setVDValueFromPage' to set the VD data from the page. Calls
* 'setAC.setValidatePageValuesVD' to validate the data. Loops through the vector vValidate to check if everything
* is valid and Calls 'doInsertVD' to insert the data. If vector contains invalid fields, forwards to validation
* page
*
* @throws Exception
*/
/*
 * Validates the VD captured from the page and inserts it when everything is
 * valid; otherwise forwards to the validation page. Reads the building-block
 * beans (OC/PC/REP and qualifiers) from the session; they may be null.
 */
private void doSubmitVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    DataManager.setAttribute(session, "sVDAction", "validate");
    // Fresh VD bean populated from the submitted form.
    VD_Bean m_VD = new VD_Bean();
    GetACService getAC = new GetACService(m_classReq, m_classRes, this);
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
    // Building-block beans previously chosen by the user (may be null).
    // The original code allocated throwaway EVS_Beans here that were
    // immediately overwritten; the dead stores are removed.
    EVS_Bean m_OC = (EVS_Bean) session.getAttribute("m_OC");
    EVS_Bean m_PC = (EVS_Bean) session.getAttribute("m_PC");
    EVS_Bean m_OCQ = (EVS_Bean) session.getAttribute("m_OCQ");
    EVS_Bean m_PCQ = (EVS_Bean) session.getAttribute("m_PCQ");
    EVS_Bean m_REP = (EVS_Bean) session.getAttribute("m_REP");
    EVS_Bean m_REPQ = (EVS_Bean) session.getAttribute("m_REPQ");
    m_setAC.setValidatePageValuesVD(m_classReq, m_classRes, m_VD, m_OC, m_PC, m_REP, m_OCQ, m_PCQ, m_REPQ, getAC);
    DataManager.setAttribute(session, "m_VD", m_VD);
    // vValidate holds triplets (field, value, status); any status other than
    // "Valid" blocks the insert. A missing vector also blocks it.
    boolean isValid = true;
    Vector vValidate = (Vector) m_classReq.getAttribute("vValidate");
    if (vValidate == null)
    {
        isValid = false;
    }
    else
    {
        for (int i = 0; vValidate.size() > i; i = i + 3)
        {
            String sStat = (String) vValidate.elementAt(i + 2);
            if (!sStat.equals("Valid"))
            {
                isValid = false;
                break; // one failure is enough; no need to scan further
            }
        }
    }
    if (!isValid)
    {
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
    }
    else
    {
        doInsertVD();
    }
} // end of doSumitVD
/**
* The doValidateVD method gets the values from page the user filled out, validates the input, then forwards results
* to the Validate page Called from 'doCreateVDActions', 'doSubmitVD' method. Calls 'setAC.setVDValueFromPage' to
* set the data from the page to the bean. Calls 'setAC.setValidatePageValuesVD' to validate the data. Stores 'm_VD'
* bean in session. Forwards the page 'ValidateVDPage.jsp' with validation vector to display.
*
* @throws Exception
*/
/*
 * Captures the VD values from the page, decides whether the Rep Term needs
 * to be re-validated (skipped when unchanged and not a brand-new VD from the
 * menu), then runs the full VD validation. Results are left in the request
 * attribute "vValidate" by setValidatePageValuesVD; callers decide where to
 * forward.
 */
private void doValidateVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String oldRepIdseq = (String) session.getAttribute("oldRepIdseq");
    String checkValidityRep = "Yes";
    String sOriginAction = (String) session.getAttribute("originAction");
    // Working VD bean being created/edited; refreshed from the page below.
    VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
    GetACService getAC = new GetACService(m_classReq, m_classRes, this);
    DataManager.setAttribute(session, "VDPageAction", "validate"); // store the page action in attribute
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
    // Skip Rep Term re-validation when it has not changed since the page was
    // opened (never skipped for a new VD created from the menu).
    if (sOriginAction != null && !sOriginAction.equals("NewVDFromMenu"))
    {
        if (m_VD.getVD_REP_IDSEQ() != null && !m_VD.getVD_REP_IDSEQ().equals("")
            && m_VD.getVD_REP_IDSEQ().equals(oldRepIdseq))
        {
            checkValidityRep = "No";
        }
    }
    DataManager.setAttribute(session, "checkValidityRep", checkValidityRep);
    // Building-block beans previously chosen by the user (may be null).
    // The original code allocated throwaway EVS_Beans here that were
    // immediately overwritten; the dead stores (and the unused "pageAction"
    // read that only fed commented-out code) are removed.
    EVS_Bean m_OC = (EVS_Bean) session.getAttribute("m_OC");
    EVS_Bean m_PC = (EVS_Bean) session.getAttribute("m_PC");
    EVS_Bean m_OCQ = (EVS_Bean) session.getAttribute("m_OCQ");
    EVS_Bean m_PCQ = (EVS_Bean) session.getAttribute("m_PCQ");
    EVS_Bean m_REP = (EVS_Bean) session.getAttribute("m_REP");
    EVS_Bean m_REPQ = (EVS_Bean) session.getAttribute("m_REPQ");
    m_setAC.setValidatePageValuesVD(m_classReq, m_classRes, m_VD, m_OC, m_PC, m_REP, m_OCQ, m_PCQ, m_REPQ, getAC);
    DataManager.setAttribute(session, "m_VD", m_VD);
} // end of doValidateVD
/**
* The doSetVDPage method gets the values from page the user filled out, Calls 'setAC.setVDValueFromPage' to set the
* data from the page to the bean. Stores 'm_VD' bean in session. Forwards the page 'CreateVDPage.jsp' with
* validation vector to display.
*
* @param sOrigin
* origin where it is called from
*
* @throws Exception
*/
private void doSetVDPage(String sOrigin) throws Exception
{
try
{
HttpSession session = m_classReq.getSession();
// pageAction tells us whether the VD type is Enumerated ("Enum") or not
String sAction = (String) m_classReq.getParameter("pageAction");
if (sAction == null)
sAction = "";
// do below for versioning to check whether these two have changed
VD_Bean vdBean = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
m_setAC.setVDValueFromPage(m_classReq, m_classRes, vdBean);
// check if pvs are used in the form when type is changed to non enumerated.
if (!sAction.equals("Enum"))
{
// get vdid from the bean
// VD_Bean vdBean = (VD_Bean)session.getAttribute("m_VD");
String sVDid = vdBean.getVD_VD_IDSEQ();
boolean isExist = false;
if (sOrigin.equals("Edit"))
{
// call function to check if relationship exists
SetACService setAC = new SetACService(this);
isExist = setAC.checkPVQCExists(m_classReq, m_classRes, sVDid, "");
// PVs in use on a CRF block the type change: revert flag to Enumerated
if (isExist)
{
String sMsg = "Unable to change Value Domain type to Non-Enumerated "
+ "because one or more Permissible Values are being used in a Case Report Form. \\n"
+ "Please create a new version of this Value Domain to change the type to Non-Enumerated.";
DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, sMsg);
vdBean.setVD_TYPE_FLAG("E");
DataManager.setAttribute(session, "m_VD", vdBean);
}
}
// mark all the pvs as deleted to remove them while submitting.
if (!isExist)
{
Vector<PV_Bean> vVDPVs = vdBean.getVD_PV_List(); // (Vector)session.getAttribute("VDPVList");
if (vVDPVs != null)
{
// set each bean as deleted to handle later
Vector<PV_Bean> vRemVDPV = vdBean.getRemoved_VDPVList();
if (vRemVDPV == null)
vRemVDPV = new Vector<PV_Bean>();
for (int i = 0; i < vVDPVs.size(); i++)
{
PV_Bean pvBean = (PV_Bean) vVDPVs.elementAt(i);
vRemVDPV.addElement(pvBean);
}
vdBean.setRemoved_VDPVList(vRemVDPV);
vdBean.setVD_PV_List(new Vector<PV_Bean>());
}
}
}
else
{
// remove meta parents since it is not needed for enum types
Vector<EVS_Bean> vParentCon = vdBean.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
if (vParentCon == null)
vParentCon = new Vector<EVS_Bean>();
for (int i = 0; i < vParentCon.size(); i++)
{
EVS_Bean ePar = (EVS_Bean) vParentCon.elementAt(i);
if (ePar == null)
ePar = new EVS_Bean();
String parDB = ePar.getEVS_DATABASE();
// System.out.println(i + " setvdpage " + parDB);
// Metathesaurus parents are flagged DEL; submit processing removes them
if (parDB != null && parDB.equals("NCI Metathesaurus"))
{
ePar.setCON_AC_SUBMIT_ACTION("DEL");
vParentCon.setElementAt(ePar, i);
}
}
vdBean.setReferenceConceptList(vParentCon);
DataManager.setAttribute(session, "m_VD", vdBean);
// get back pvs associated with this vd
VD_Bean oldVD = (VD_Bean) session.getAttribute("oldVDBean");
if (oldVD == null)
oldVD = new VD_Bean();
// restore the PV list only when the VD was Enumerated before the edit
if (oldVD.getVD_TYPE_FLAG() != null && oldVD.getVD_TYPE_FLAG().equals("E"))
{
if (oldVD.getVD_VD_IDSEQ() != null && !oldVD.getVD_VD_IDSEQ().equals(""))
{
// String pvAct = "Search";
String sMenu = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
// if (sMenu.equals("NewVDTemplate"))
// pvAct = "NewUsing";
// Integer pvCount = new Integer(0);
vdBean.setVD_PV_List(oldVD.cloneVDPVVector(oldVD.getVD_PV_List()));
vdBean.setRemoved_VDPVList(new Vector<PV_Bean>());
GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
if (sMenu.equals("Questions"))
serAC.getACQuestionValue(vdBean);
}
}
}
DataManager.setAttribute(session, "m_VD", vdBean);
}
catch (Exception e)
{
logger.error("Error - doSetVDPage " + e.toString(), e);
}
} // end of doValidateVD
/**
 * Makes the VD's system-generated preferred name from its parent concepts
 * and its public id/version.
 *
 * @param ac
 * current VD bean (passed as its AC_Bean base type)
 * @param vParent
 * vector of selected parent concepts
 * @return modified VD bean
 */
public AC_Bean getSystemName(AC_Bean ac, Vector<EVS_Bean> vParent)
{
    // Caller contract: ac is always the VD bean for this page.
    VD_Bean vd = (VD_Bean) ac;
    try
    {
        // Build the name from the concept codes of the active parents
        // (not marked DEL, not Non_EVS), walking the vector from the end.
        String sysName = "";
        for (int i = vParent.size() - 1; i > -1; i--)
        {
            EVS_Bean par = (EVS_Bean) vParent.elementAt(i);
            String evsDB = par.getEVS_DATABASE();
            String subAct = par.getCON_AC_SUBMIT_ACTION();
            if (subAct != null && !subAct.equals("DEL") && evsDB != null && !evsDB.equals("Non_EVS"))
            {
                // Keep appending concept ids until the name reaches 20 chars.
                // (The original also tested sysName.equals(""), which is
                // redundant: an empty string always has length < 20.)
                if (sysName.length() < 20)
                    sysName += par.getCONCEPT_IDENTIFIER() + ":";
                else
                    break;
            }
        }
        // append vd public id and version in the end
        if (vd.getVD_VD_ID() != null)
            sysName += vd.getVD_VD_ID();
        String sver = vd.getVD_VERSION();
        if (sver != null && sver.indexOf(".") < 0)
            sver += ".0"; // normalize e.g. "1" -> "1.0"
        if (vd.getVD_VERSION() != null)
            sysName += "v" + sver;
        // the preferred name is capped at 30 characters; keep the tail
        if (sysName.length() > 30)
            sysName = sysName.substring(sysName.length() - 30);
        vd.setAC_SYS_PREF_NAME(sysName); // store it in vd bean
        // Make the system name the preferred name if "SYS" was selected;
        // fall back to the type stored on the bean when the request has none.
        String selNameType = (String) m_classReq.getParameter("rNameConv");
        if (selNameType == null)
        {
            selNameType = vd.getVD_TYPE_NAME();
        }
        else
        {
            // store the keyed-in text in the user field for later use
            // (selNameType is known non-null in this branch, so the original
            // extra null-check was redundant)
            String sPrefName = (String) m_classReq.getParameter("txPreferredName");
            if (selNameType.equals("USER") && sPrefName != null)
                vd.setAC_USER_PREF_NAME(sPrefName);
        }
        if (selNameType != null && selNameType.equals("SYS"))
            vd.setVD_PREFERRED_NAME(sysName);
    }
    catch (Exception e)
    {
        this.logger.error("ERROR - getSystemName : " + e.toString(), e);
    }
    return vd;
}
/**
* marks the parent and/or its pvs as deleted from the session.
*
* @param sPVAction
* @param vdPage
* @throws java.lang.Exception
*/
private void doRemoveParent(String sPVAction, String vdPage) throws Exception
{
HttpSession session = m_classReq.getSession();
VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
Vector<EVS_Bean> vParentCon = m_VD.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
if (vParentCon == null)
vParentCon = new Vector<EVS_Bean>();
// get the selected parent info from the request
String sParentCC = (String) m_classReq.getParameter("selectedParentConceptCode");
String sParentName = (String) m_classReq.getParameter("selectedParentConceptName");
String sParentDB = (String) m_classReq.getParameter("selectedParentConceptDB");
// for non evs parent compare the long names instead
if (sParentName != null && !sParentName.equals("") && sParentDB != null && sParentDB.equals("Non_EVS"))
sParentCC = sParentName;
if (sParentCC != null)
{
// find the matching parent in the reference-concept list
for (int i = 0; i < vParentCon.size(); i++)
{
EVS_Bean eBean = (EVS_Bean) vParentCon.elementAt(i);
if (eBean == null)
eBean = new EVS_Bean();
String thisParent = eBean.getCONCEPT_IDENTIFIER();
if (thisParent == null)
thisParent = "";
String thisParentName = eBean.getLONG_NAME();
if (thisParentName == null)
thisParentName = "";
String thisParentDB = eBean.getEVS_DATABASE();
if (thisParentDB == null)
thisParentDB = "";
// for non evs parent compare the long names instead
if (sParentDB != null && sParentDB.equals("Non_EVS"))
thisParent = thisParentName;
// look for the matched parent from the vector to remove
if (sParentCC.equals(thisParent))
{
// refresh the on-page EVS trees so the removed node disappears;
// the returned HTML is intentionally unused here
@SuppressWarnings("unused") String strHTML = "";
EVSMasterTree tree = new EVSMasterTree(m_classReq, thisParentDB, this);
strHTML = tree.refreshTree(thisParentName, "false");
strHTML = tree.refreshTree("parentTree" + thisParentName, "false");
if (sPVAction.equals("removePVandParent"))
{
Vector<PV_Bean> vVDPVList = m_VD.getVD_PV_List(); // (Vector)session.getAttribute("VDPVList");
if (vVDPVList == null)
vVDPVList = new Vector<PV_Bean>();
// loop through the vector of pvs to get matched parent
for (int j = 0; j < vVDPVList.size(); j++)
{
PV_Bean pvBean = (PV_Bean) vVDPVList.elementAt(j);
if (pvBean == null)
pvBean = new PV_Bean();
EVS_Bean pvParent = (EVS_Bean) pvBean.getPARENT_CONCEPT();
if (pvParent == null)
pvParent = new EVS_Bean();
String pvParCon = pvParent.getCONCEPT_IDENTIFIER();
// match the parent concept with the pv's parent concept
if (thisParent.equals(pvParCon))
{
pvBean.setVP_SUBMIT_ACTION("DEL"); // mark the vp as deleted
// String pvID = pvBean.getPV_PV_IDSEQ();
vVDPVList.setElementAt(pvBean, j);
}
}
m_VD.setVD_PV_List(vVDPVList);
// DataManager.setAttribute(session, "VDPVList", vVDPVList);
}
// mark the parent as deleted and leave
eBean.setCON_AC_SUBMIT_ACTION("DEL");
vParentCon.setElementAt(eBean, i);
break;
}
}
}
// DataManager.setAttribute(session, "VDParentConcept", vParentCon);
m_VD.setReferenceConceptList(vParentCon);
// make sure all other changes are stored back in vd
m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
// make vd's system preferred name
m_VD = (VD_Bean) this.getSystemName(m_VD, vParentCon);
DataManager.setAttribute(session, "m_VD", m_VD);
// clear the selected-parent session attributes
DataManager.setAttribute(session, "SelectedParentName", "");
DataManager.setAttribute(session, "SelectedParentCC", "");
DataManager.setAttribute(session, "SelectedParentDB", "");
DataManager.setAttribute(session, "SelectedParentMetaSource", "");
// forward the page according to vdPage
if (vdPage.equals("editVD"))
ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
else
ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
}
/**
* splits the vd rep term from cadsr into individual concepts
*
* @param sComp
* name of the searched component
* @param m_Bean
* selected EVS bean
* @param nameAction
* string naming action
*
*/
/*
 * Splits a caDSR rep term into its individual concepts: the selected bean's
 * name/idseq are copied onto the VD, then each concept behind its CONDR is
 * added via addRepConcepts (first one as "Primary", the rest as "Qualifier").
 */
private void splitIntoConceptsVD(String sComp, EVS_Bean m_Bean, String nameAction)
{
    try
    {
        HttpSession session = m_classReq.getSession();
        VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
        if (m_VD == null)
            m_VD = new VD_Bean();
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
        // NOTE: the original fetched the "vRepTerm" session attribute here
        // but never used it; that dead read has been removed.
        String sCondr = m_Bean.getCONDR_IDSEQ();
        String sLongName = m_Bean.getLONG_NAME();
        String sIDSEQ = m_Bean.getIDSEQ();
        if (sComp.equals("RepTerm") || sComp.equals("RepQualifier"))
        {
            m_VD.setVD_REP_TERM(sLongName);
            m_VD.setVD_REP_IDSEQ(sIDSEQ);
        }
        // Resolve the CONDR into its concepts and feed them in one by one.
        if (sCondr != null && !sCondr.equals(""))
        {
            GetACService getAC = new GetACService(m_classReq, m_classRes, this);
            Vector vCon = getAC.getAC_Concepts(sCondr, null, true);
            if (vCon != null && vCon.size() > 0)
            {
                for (int j = 0; j < vCon.size(); j++)
                {
                    // (dead "new EVS_Bean()" pre-assignment removed)
                    EVS_Bean bean = (EVS_Bean) vCon.elementAt(j);
                    if (bean != null)
                    {
                        if (j == 0) // Primary Concept
                            m_VD = this.addRepConcepts(nameAction, m_VD, bean, "Primary");
                        else // Secondary Concepts
                            m_VD = this.addRepConcepts(nameAction, m_VD, bean, "Qualifier");
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        this.logger.error("ERROR - splitintoConceptVD : " + e.toString(), e);
    }
}
/**
 * Creates the preferred name for a VD. Names of all three types (system,
 * abbreviated, user) are stored in the bean for later use, so that if the
 * naming type is changed the name can be repopulated according to the
 * selected type.
 *
 * @param newBean
 * new EVS bean to append the name to (null rebuilds the whole name)
 * @param nameAct
 * naming action, e.g. "Search" or "Remove"
 * @param pageAC
 * current VD bean (passed as its AC_Bean base type)
 * @return VD bean
 */
public AC_Bean getACNames(EVS_Bean newBean, String nameAct, AC_Bean pageAC)
{
HttpSession session = m_classReq.getSession();
// caller contract: pageAC is the VD bean; fall back to the session copy
VD_Bean pageVD = (VD_Bean)pageAC;
if (pageVD == null)
pageVD = (VD_Bean) session.getAttribute("m_VD");
// get vd object class and property names
String sLongName = "";
String sPrefName = "";
String sAbbName = "";
String sDef = "";
// get the existing one if not restructuring the name but appending it
// (newBean != null means "append"; null means rebuild from scratch)
if (newBean != null)
{
sLongName = pageVD.getVD_LONG_NAME();
if (sLongName == null)
sLongName = "";
sDef = pageVD.getVD_PREFERRED_DEFINITION();
if (sDef == null)
sDef = "";
}
// get the typed text on to user name
String selNameType = "";
if (nameAct.equals("Search") || nameAct.equals("Remove"))
{
selNameType = (String) m_classReq.getParameter("rNameConv");
sPrefName = (String) m_classReq.getParameter("txPreferredName");
if (selNameType != null && selNameType.equals("USER") && sPrefName != null)
pageVD.setAC_USER_PREF_NAME(sPrefName);
}
// get the object class into the long name and abbr name
String sObjClass = pageVD.getVD_OBJ_CLASS();
if (sObjClass == null)
sObjClass = "";
if (!sObjClass.equals(""))
{
// rearrange it long name
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " "; // add extra space if not empty
sLongName += sObjClass;
EVS_Bean mOC = (EVS_Bean) session.getAttribute("m_OC");
if (mOC != null)
{
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += mOC.getPREFERRED_DEFINITION();
}
}
if (!sAbbName.equals(""))
sAbbName += "_"; // add underscore if not empty
if (sObjClass.length() > 3)
sAbbName += sObjClass.substring(0, 4); // truncate to 4 letters
else
sAbbName = sObjClass;
}
// get the property into the long name and abbr name
String sPropClass = pageVD.getVD_PROP_CLASS();
if (sPropClass == null)
sPropClass = "";
if (!sPropClass.equals(""))
{
// rearrange it long name
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " "; // add extra space if not empty
sLongName += sPropClass;
EVS_Bean mPC = (EVS_Bean) session.getAttribute("m_PC");
if (mPC != null)
{
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += mPC.getPREFERRED_DEFINITION();
}
}
if (!sAbbName.equals(""))
sAbbName += "_"; // add underscore if not empty
if (sPropClass.length() > 3)
sAbbName += sPropClass.substring(0, 4); // truncate to 4 letters
else
sAbbName += sPropClass;
}
// rep-term concepts: slot 0 is the primary, the rest are qualifiers
Vector vRep = (Vector) session.getAttribute("vRepTerm");
if (vRep == null)
vRep = new Vector();
// add the qualifiers first
for (int i = 1; vRep.size() > i; i++)
{
EVS_Bean eCon = (EVS_Bean) vRep.elementAt(i);
if (eCon == null)
eCon = new EVS_Bean();
String conName = eCon.getLONG_NAME();
if (conName == null)
conName = "";
if (!conName.equals(""))
{
// rearrange it long name and definition
if (newBean == null)
{
// name-value-pair concepts carry their value as a "::value" suffix
String nvpValue = "";
if (this.checkNVP(eCon))
nvpValue = "::" + eCon.getNVP_CONCEPT_VALUE();
if (!sLongName.equals(""))
sLongName += " ";
sLongName += conName + nvpValue;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += eCon.getPREFERRED_DEFINITION() + nvpValue;
}
if (!sAbbName.equals(""))
sAbbName += "_";
if (conName.length() > 3)
sAbbName += conName.substring(0, 4); // truncate to four letters
else
sAbbName += conName;
}
}
// add the primary
if (vRep != null && vRep.size() > 0)
{
EVS_Bean eCon = (EVS_Bean) vRep.elementAt(0);
if (eCon == null)
eCon = new EVS_Bean();
String sPrimary = eCon.getLONG_NAME();
if (sPrimary == null)
sPrimary = "";
if (!sPrimary.equals(""))
{
// rearrange it only long name and definition
if (newBean == null)
{
String nvpValue = "";
if (this.checkNVP(eCon))
nvpValue = "::" + eCon.getNVP_CONCEPT_VALUE();
if (!sLongName.equals(""))
sLongName += " ";
sLongName += sPrimary + nvpValue;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += eCon.getPREFERRED_DEFINITION() + nvpValue;
}
if (!sAbbName.equals(""))
sAbbName += "_";
if (sPrimary.length() > 3)
sAbbName += sPrimary.substring(0, 4); // truncate to four letters
else
sAbbName += sPrimary;
}
}
// truncate to 30 characters
if (sAbbName != null && sAbbName.length() > 30)
sAbbName = sAbbName.substring(0, 30);
// add the abbr name to vd bean and page is selected
pageVD.setAC_ABBR_PREF_NAME(sAbbName);
// make abbr name name preferrd name if sys was selected
if (selNameType != null && selNameType.equals("ABBR"))
pageVD.setVD_PREFERRED_NAME(sAbbName);
if (newBean != null) // appending to the existing;
{
String sSelectName = newBean.getLONG_NAME();
if (!sLongName.equals(""))
sLongName += " ";
sLongName += sSelectName;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += newBean.getPREFERRED_DEFINITION();
}
// store the long names, definition, and usr name in vd bean if searched
if (nameAct.equals("Search"))
{
pageVD.setVD_LONG_NAME(sLongName);
pageVD.setVD_PREFERRED_DEFINITION(sDef);
pageVD.setVDNAME_CHANGED(true); // flag so downstream code knows the name moved
}
return pageVD;
}
/**
*
* @param nameAction
* stirng name action
*
*/
/*
 * Applies the row the user selected in the rep-term / EVS search results to
 * the working VD: resolves the selected EVS_Bean, splits caDSR rep terms
 * into concepts, keeps the session vector "vRepTerm" in sync, and rebuilds
 * the VD name depending on nameAction ("newName", "appendName", "blockName").
 *
 * @param nameAction string naming action
 */
private void doVDUseSelection(String nameAction)
{
    try
    {
        HttpSession session = m_classReq.getSession();
        String sSelRow = "";
        boolean selectedRepQualifiers = false;
        // working VD bean, refreshed with whatever is on the page
        VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
        if (m_VD == null)
            m_VD = new VD_Bean();
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
        Vector<EVS_Bean> vRepTerm = (Vector) session.getAttribute("vRepTerm");
        if (vRepTerm == null)
            vRepTerm = new Vector<EVS_Bean>();
        // more than one entry means rep qualifiers were already chosen
        if (vRepTerm.size() > 1)
            selectedRepQualifiers = true;
        Vector vAC = new Vector();
        EVS_Bean m_REP = new EVS_Bean();
        String sComp = (String) m_classReq.getParameter("sCompBlocks");
        // selection came from the rep-term (or qualifier) search results
        if (sComp.equals("RepTerm") || sComp.equals("RepQualifier"))
        {
            sSelRow = (String) m_classReq.getParameter("selRepRow");
            vAC = (Vector) session.getAttribute("vACSearch");
            if (vAC == null)
                vAC = new Vector();
            if (sSelRow != null && !sSelRow.equals(""))
            {
                // row ids carry a two-character prefix before the numeric index
                int iSelRow = Integer.parseInt(sSelRow.substring(2));
                // BUG FIX: the old guard (vAC.size() > iSelRow - 1) let
                // iSelRow == vAC.size() through, and elementAt() then threw
                // ArrayIndexOutOfBoundsException. Bounds-check properly; an
                // out-of-range row falls through to the error message below.
                if (iSelRow >= 0 && iSelRow < vAC.size())
                    m_REP = (EVS_Bean) vAC.elementAt(iSelRow);
                // append a name-value pair to the concept when one was entered
                String sNVP = (String) m_classReq.getParameter("nvpConcept");
                if (sNVP != null && !sNVP.equals(""))
                {
                    m_REP.setNVP_CONCEPT_VALUE(sNVP);
                    String sName = m_REP.getLONG_NAME();
                    m_REP.setLONG_NAME(sName + "::" + sNVP);
                    m_REP.setPREFERRED_DEFINITION(m_REP.getPREFERRED_DEFINITION() + "::" + sNVP);
                }
            }
            else
            {
                storeStatusMsg("Unable to get the selected row from the Rep Term search results.");
                return;
            }
            // send it back if unable to obtain the concept
            if (m_REP == null || m_REP.getLONG_NAME() == null)
            {
                storeStatusMsg("Unable to obtain concept from the selected row of the " + sComp
                    + " search results.\\n" + "Please try again.");
                return;
            }
            // handle the primary search
            if (sComp.equals("RepTerm"))
            {
                if (m_REP.getEVS_DATABASE().equals("caDSR"))
                {
                    // split it if rep term; add concept class to the list if evs id exists
                    if (m_REP.getCONDR_IDSEQ() == null || m_REP.getCONDR_IDSEQ().equals(""))
                    {
                        if (m_REP.getCONCEPT_IDENTIFIER() == null || m_REP.getCONCEPT_IDENTIFIER().equals(""))
                        {
                            storeStatusMsg("This Rep Term is not associated to a concept, so the data is suspect. \\n"
                                + "Please choose another Rep Term.");
                        }
                        else
                            m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Primary");
                    }
                    else
                        splitIntoConceptsVD(sComp, m_REP, nameAction);
                }
                else
                    m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Primary");
            }
            else if (sComp.equals("RepQualifier"))
            {
                // reserve slot 0 of the vector for the primary concept
                if (vRepTerm.size() < 1)
                {
                    EVS_Bean OCBean = new EVS_Bean();
                    vRepTerm.addElement(OCBean);
                    DataManager.setAttribute(session, "vRepTerm", vRepTerm);
                }
                m_VD.setVD_REP_IDSEQ("");
                m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Qualifier");
            }
        }
        else
        {
            // selection came from an EVS tree/table rather than rep-term search
            EVS_Bean eBean = this.getEVSSelRow(m_classReq);
            if (eBean != null && eBean.getLONG_NAME() != null)
            {
                if (nameAction.equals("appendName"))
                    m_VD = (VD_Bean) this.getACNames(eBean, "Search", m_VD);
            }
        }
        // re-read the concept list and keep the VD's rep attributes in sync
        vRepTerm = (Vector) session.getAttribute("vRepTerm");
        if (vRepTerm != null && vRepTerm.size() > 0)
        {
            vRepTerm = this.getMatchingThesarusconcept(vRepTerm, "Representation Term");
            m_VD = this.updateRepAttribues(vRepTerm, m_VD);
        }
        if (m_REP.getcaDSR_COMPONENT() != null && m_REP.getcaDSR_COMPONENT().equals("Concept Class"))
        {
            m_VD.setVD_REP_IDSEQ("");
        }
        else
        { // Rep Term or from vocabulary
            if (m_REP.getcaDSR_COMPONENT() != null && !selectedRepQualifiers)
            { // selected an existing rep term as-is: reuse its condr/idseq
                ValidationStatusBean statusBean = new ValidationStatusBean();
                statusBean.setStatusMessage("** Using existing " + m_REP.getcaDSR_COMPONENT() + " "
                    + m_REP.getLONG_NAME() + " (" + m_REP.getID() + "v" + m_REP.getVERSION() + ") from "
                    + m_REP.getCONTEXT_NAME());
                statusBean.setCondrExists(true);
                statusBean.setCondrIDSEQ(m_REP.getCONDR_IDSEQ());
                statusBean.setEvsBeanExists(true);
                statusBean.setEvsBeanIDSEQ(m_REP.getIDSEQ());
                session.setAttribute("vdStatusBean", statusBean);
            }
            else
            {
                m_VD.setVD_REP_IDSEQ("");
            }
        }
        DataManager.setAttribute(session, "vRepTerm", vRepTerm);
        // rebuild the VD name unless we are appending to it
        EVS_Bean nullEVS = null;
        if (nameAction.equals("newName"))
            m_VD = (VD_Bean) this.getACNames(nullEVS, "Search", m_VD);
        else if (nameAction.equals("blockName"))
            m_VD = (VD_Bean) this.getACNames(nullEVS, "blockName", m_VD);
        DataManager.setAttribute(session, "m_VD", m_VD);
    }
    catch (Exception e)
    {
        this.logger.error("ERROR - doVDUseSelection : " + e.toString(), e);
    }
} // end of doVDUseSelection
/**
 * Adds the selected concept to the vector of representation-term concepts
 * ("vRepTerm" in the session) and optionally appends it to the VD name.
 *
 * @param nameAction
 * String naming action (e.g. "appendName")
 * @param vdBean
 * current VD_Bean
 * @param eBean
 * selected EVS_Bean
 * @param repType
 * String rep-term concept type ("Primary" or "Qualifier")
 * @return the (possibly renamed) VD_Bean
 * @throws Exception
 */
@SuppressWarnings("unchecked")
private VD_Bean addRepConcepts(String nameAction, VD_Bean vdBean,
    EVS_Bean eBean, String repType) throws Exception
{
    HttpSession session = m_classReq.getSession();
    // Fetch (or lazily create) the running list of rep-term concepts.
    Vector<EVS_Bean> repConcepts = (Vector) session.getAttribute("vRepTerm");
    if (repConcepts == null)
        repConcepts = new Vector<EVS_Bean>();
    // EVS user settings drive the vocabulary-name resolution below.
    EVS_UserBean evsUser = (EVS_UserBean) this.sessionData.EvsUsrBean;
    if (evsUser == null)
        evsUser = new EVS_UserBean();
    // Mark the concept for insert within the VD's context.
    eBean.setCON_AC_SUBMIT_ACTION("INS");
    eBean.setCONTE_IDSEQ(vdBean.getVD_CONTE_IDSEQ());
    String vocabDB = eBean.getEVS_DATABASE();
    if (vocabDB != null && eBean.getEVS_ORIGIN() != null && vocabDB.equalsIgnoreCase("caDSR"))
    {
        // Map a caDSR-sourced concept back to its EVS vocabulary name;
        // a META_VALUE answer means "use the origin as-is".
        vocabDB = eBean.getVocabAttr(evsUser, eBean.getEVS_ORIGIN(), EVSSearch.VOCAB_NAME, EVSSearch.VOCAB_DBORIGIN);
        if (vocabDB.equals(EVSSearch.META_VALUE))
            vocabDB = eBean.getEVS_ORIGIN();
        eBean.setEVS_DATABASE(vocabDB);
    }
    // The primary concept always occupies slot 0; qualifiers are appended.
    boolean replacePrimary = repType.equals("Primary") && repConcepts.size() > 0;
    if (replacePrimary)
        repConcepts.setElementAt(eBean, 0);
    else
        repConcepts.addElement(eBean);
    DataManager.setAttribute(session, "vRepTerm", repConcepts);
    DataManager.setAttribute(session, "newRepTerm", "true");
    // Append the concept to the VD name when requested.
    if (nameAction.equals("appendName"))
        vdBean = (VD_Bean) this.getACNames(eBean, "Search", vdBean);
    return vdBean;
} // end addRepConcepts
/**
 * Validates a block edit of Value Domains. Captures the page values into the
 * session VD bean, runs the block-edit validation rules, and forwards to
 * 'ValidateVDPage.jsp' to display the results.
 *
 * @throws Exception
 */
private void doValidateVDBlockEdit() throws Exception
{
HttpSession session = m_classReq.getSession();
VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
DataManager.setAttribute(session, "VDPageAction", "validate"); // store the page action in attribute
m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
DataManager.setAttribute(session, "m_VD", m_VD);
m_setAC.setValidateBlockEdit(m_classReq, m_classRes, "ValueDomain");
DataManager.setAttribute(session, "VDEditAction", "VDBlockEdit");
ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
} // end of doValidateVD
/**
 * Inserts or updates a Value Domain record. Called from 'service' when reqType is 'validateVDFromForm'.
 * If the page action is 'reEditVD' the user is routed back to the create/edit (or PV) page; otherwise the request
 * is dispatched to the matching handler: 'doUpdateVDAction' for an ordinary edit, 'doUpdateVDActionBE' for a block
 * edit, 'doInsertVDfromDEAction' when the VD is being created from the DE page, and 'doInsertVDfromMenuAction'
 * for a VD created from the menu (including template/version).
 *
 * @throws Exception
 */
private void doInsertVD() throws Exception
{
    HttpSession httpSession = m_classReq.getSession();
    // wipe any status message left over from a previous request
    DataManager.setAttribute(httpSession, Session_Data.SESSION_STATUS_MESSAGE, "");
    DataManager.setAttribute(httpSession, "vStatMsg", new Vector());
    String vdAction = (String) httpSession.getAttribute("VDAction");
    if (vdAction == null)
        vdAction = "";
    String vdEditAction = (String) httpSession.getAttribute("VDEditAction");
    if (vdEditAction == null)
        vdEditAction = "";
    String originAction = (String) httpSession.getAttribute("originAction");
    String pageAction = (String) m_classReq.getParameter("ValidateVDPageAction");
    if (pageAction == null)
        pageAction = "submitting"; // direct submit without validating
    if (pageAction.equals("reEditVD"))
    {
        // user chose to go back from the validation page to the create/edit pages
        String tabFocus = (String) httpSession.getAttribute("TabFocus");
        if (tabFocus != null && tabFocus.equals("PV"))
            ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
        else if (vdAction.equals("EditVD") || vdAction.equals("BlockEdit"))
            ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        else
            ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    // brand-new VD started from the menu
    else if (vdAction.equals("NewVD") && originAction.equals("NewVDFromMenu"))
        doInsertVDfromMenuAction();
    // ordinary single-VD edit
    else if (vdAction.equals("EditVD") && !originAction.equals("BlockEditVD"))
        doUpdateVDAction();
    // block edit of several VDs at once
    else if (vdEditAction.equals("VDBlockEdit"))
        doUpdateVDActionBE();
    // VD created while creating/editing a Data Element
    else if (originAction.equals("CreateNewVDfromCreateDE")
        || originAction.equals("CreateNewVDfromEditDE"))
        doInsertVDfromDEAction(originAction);
    // from the menu AND template/version
    else
        doInsertVDfromMenuAction();
} // end of doInsertVD
/**
 * Updates an existing Value Domain record in the database and displays the result. Called from 'doInsertVD' when
 * the action is editing. Retrieves the session bean m_VD and calls 'insAC.setVD' to update the database. Updates
 * the DE bean and sends back to the EditDE page if the edit originated from the DE page; otherwise calls
 * 'serAC.refreshData' to get the refreshed search result and forwards back to the search page.
 *
 * If ret is not empty the status message holds the error and the page goes back to 'EditVDPage.jsp' for re-edit.
 *
 * @throws Exception
 */
private void doUpdateVDAction() throws Exception
{
    HttpSession session = m_classReq.getSession();
    VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
    VD_Bean oldVDBean = (VD_Bean) session.getAttribute("oldVDBean");
    // String sMenu = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
    InsACService insAC = new InsACService(m_classReq, m_classRes, this);
    // persist the building-block (rep term) attributes before the VD itself
    doInsertVDBlocks(null);
    // update the status message with the VD name and ID
    storeStatusMsg("Value Domain Name : " + VDBean.getVD_LONG_NAME());
    storeStatusMsg("Public ID : " + VDBean.getVD_VD_ID());
    // call stored procedure to update attributes
    String ret = insAC.setVD("UPD", VDBean, "Edit", oldVDBean);
    // forward to search page with refreshed list after successful update
    if ((ret == null) || ret.equals(""))
    {
        this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
        String sOriginAction = (String) session.getAttribute("originAction");
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        // forward page back to EditDE
        if (sOriginAction.equals("editVDfromDE") || sOriginAction.equals("EditDE"))
        {
            DE_Bean DEBean = (DE_Bean) session.getAttribute("m_DE");
            if (DEBean != null)
            {
                // push the updated VD identity into the DE bean held in the session
                DEBean.setDE_VD_IDSEQ(VDBean.getVD_VD_IDSEQ());
                DEBean.setDE_VD_PREFERRED_NAME(VDBean.getVD_PREFERRED_NAME());
                DEBean.setDE_VD_NAME(VDBean.getVD_LONG_NAME());
                // reset the attributes
                DataManager.setAttribute(session, "originAction", "");
                // add VD Bean into DE Bean
                DEBean.setDE_VD_Bean(VDBean);
                DataManager.setAttribute(session, "m_DE", DEBean);
                CurationServlet deServ = (DataElementServlet) getACServlet("DataElement");
                DEBean = (DE_Bean) deServ.getACNames("new", "editVD", DEBean);
            }
            ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp");
        }
        // go to search page with refreshed list
        else
        {
            VDBean.setVD_ALIAS_NAME(VDBean.getVD_PREFERRED_NAME());
            // VDBean.setVD_TYPE_NAME("PRIMARY");
            DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, "editVD");
            String oldID = VDBean.getVD_VD_IDSEQ();
            serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Edit", oldID);
            ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
        }
    }
    // goes back to edit page if error occurs
    else
    {
        DataManager.setAttribute(session, "VDPageAction", "nothing");
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
}
/**
 * Performs the block-edit / block-version update for every Value Domain checked in the search results. Called from
 * 'doInsertVD' when the edit action is 'VDBlockEdit'. For each selected row the page edits are merged in via
 * 'InsertEditsIntoVDBeanSR'; a "Point"/"Whole" version choice creates a new version first ('insAC.setAC_VERSION')
 * and then updates the remaining attributes ('insAC.setVD'), otherwise the row is updated in place. The search
 * result list is refreshed after each successful update and the page forwards to 'SearchResultsPage.jsp'.
 *
 * Errors are accumulated in the status message via 'storeStatusMsg'.
 *
 * @throws Exception
 */
private void doUpdateVDActionBE() throws Exception
{
    HttpSession session = m_classReq.getSession();
    VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD"); // validated edited m_VD
    boolean isRefreshed = false;
    String ret = ":";
    InsACService insAC = new InsACService(m_classReq, m_classRes, this);
    GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
    GetACService getAC = new GetACService(m_classReq, m_classRes, this);
    // Vector vStatMsg = new Vector();
    // "true" when the user picked a new representation term on the page
    String sNewRep = (String) session.getAttribute("newRepTerm");
    if (sNewRep == null)
        sNewRep = "";
    //System.out.println(" new rep " + sNewRep);
    // rows checked for block edit in the search results
    Vector vBERows = (Vector) session.getAttribute("vBEResult");
    int vBESize = vBERows.size();
    Integer vBESize2 = new Integer(vBESize);
    m_classReq.setAttribute("vBESize", vBESize2);
    String sRep_IDSEQ = "";
    if (vBERows.size() > 0)
    {
        // Be sure the buffer is loaded when doing versioning.
        String newVersion = VDBean.getVD_VERSION();
        if (newVersion == null)
            newVersion = "";
        // "Point"/"Whole" means every selected row gets a new version
        boolean newVers = (newVersion.equals("Point") || newVersion.equals("Whole"));
        if (newVers)
        {
            @SuppressWarnings("unchecked")
            Vector<AC_Bean> tvec = vBERows;
            AltNamesDefsSession.loadAsNew(this, session, tvec);
        }
        for (int i = 0; i < (vBERows.size()); i++)
        {
            // String sVD_ID = ""; //out
            VD_Bean VDBeanSR = new VD_Bean();
            VDBeanSR = (VD_Bean) vBERows.elementAt(i);
            // keep a copy of the pre-edit state for the update call
            VD_Bean oldVDBean = new VD_Bean();
            oldVDBean = oldVDBean.cloneVD_Bean(VDBeanSR);
            // String oldName = (String) VDBeanSR.getVD_PREFERRED_NAME();
            // updates the data from the page into the sr bean
            InsertEditsIntoVDBeanSR(VDBeanSR, VDBean);
            // create newly selected rep term (only once, on the first row)
            if (i == 0 && sNewRep.equals("true"))
            {
                doInsertVDBlocks(VDBeanSR); // create it
                sRep_IDSEQ = VDBeanSR.getVD_REP_IDSEQ(); // get rep idseq
                if (sRep_IDSEQ == null)
                    sRep_IDSEQ = "";
                VDBean.setVD_REP_IDSEQ(sRep_IDSEQ); // add page vd bean
                String sRep_Condr = VDBeanSR.getVD_REP_CONDR_IDSEQ(); // get rep condr
                if (sRep_Condr == null)
                    sRep_Condr = "";
                VDBean.setVD_REP_CONDR_IDSEQ(sRep_Condr); // add to page vd bean
                // VDBean.setVD_REP_QUAL("");
            }
            // DataManager.setAttribute(session, "m_VD", VDBeanSR);
            String oldID = oldVDBean.getVD_VD_IDSEQ();
            // update the status message with the VD name and ID
            storeStatusMsg("Value Domain Name : " + VDBeanSR.getVD_LONG_NAME());
            storeStatusMsg("Public ID : " + VDBeanSR.getVD_VD_ID());
            // insert the version
            if (newVers) // block version
            {
                // creates new version first and updates all other attributes
                String strValid = m_setAC.checkUniqueInContext("Version", "VD", null, null, VDBeanSR, getAC,
                    "version");
                if (strValid != null && !strValid.equals(""))
                    ret = "unique constraint";
                else
                    ret = insAC.setAC_VERSION(null, null, VDBeanSR, "ValueDomain");
                if (ret == null || ret.equals(""))
                {
                    // PVServlet pvser = new PVServlet(req, res, this);
                    // pvser.searchVersionPV(VDBean, 0, "", "");
                    // get the right system name for new version
                    String prefName = VDBeanSR.getVD_PREFERRED_NAME();
                    String vdID = VDBeanSR.getVD_VD_ID();
                    String newVer = "v" + VDBeanSR.getVD_VERSION();
                    String oldVer = "v" + oldVDBean.getVD_VERSION();
                    // replace the version number if system generated name
                    if (prefName.indexOf(vdID) > 0)
                    {
                        prefName = prefName.replaceFirst(oldVer, newVer);
                        VDBean.setVD_PREFERRED_NAME(prefName);
                    }
                    // keep the value and value count stored
                    String pvValue = VDBeanSR.getVD_Permissible_Value();
                    Integer pvCount = VDBeanSR.getVD_Permissible_Value_Count();
                    ret = insAC.setVD("UPD", VDBeanSR, "Version", oldVDBean);
                    if (ret == null || ret.equals(""))
                    {
                        VDBeanSR.setVD_Permissible_Value(pvValue);
                        VDBeanSR.setVD_Permissible_Value_Count(pvCount);
                        serAC.refreshData(m_classReq, m_classRes, null, null, VDBeanSR, null, "Version", oldID);
                        isRefreshed = true;
                        // reset the appended attributes to remove all the checking of the row
                        Vector vCheck = new Vector();
                        DataManager.setAttribute(session, "CheckList", vCheck);
                        DataManager.setAttribute(session, "AppendAction", "Not Appended");
                        // resetEVSBeans(req, res);
                    }
                }
                // already exists
                else if (ret.indexOf("unique constraint") >= 0)
                    storeStatusMsg("\\t New version " + VDBeanSR.getVD_VERSION()
                        + " already exists in the data base.\\n");
                // some other problem
                else
                    storeStatusMsg("\\t " + ret + " : Unable to create new version "
                        + VDBeanSR.getVD_VERSION() + ".\\n");
            }
            else
            // block edit
            {
                ret = insAC.setVD("UPD", VDBeanSR, "Edit", oldVDBean);
                // forward to search page with refreshed list after successful update
                if ((ret == null) || ret.equals(""))
                {
                    serAC.refreshData(m_classReq, m_classRes, null, null, VDBeanSR, null, "Edit", oldID);
                    isRefreshed = true;
                }
            }
        }
        AltNamesDefsSession.blockSave(this, session);
    }
    // to get the final result vector if not refreshed at all
    if (!(isRefreshed))
    {
        Vector<String> vResult = new Vector<String>();
        serAC.getVDResult(m_classReq, m_classRes, vResult, "");
        DataManager.setAttribute(session, "results", vResult); // store the final result in the session
        DataManager.setAttribute(session, "VDPageAction", "nothing");
    }
    ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
}
/**
 * Copies the changed values of a block edit into the selected VD bean. Only attributes the user actually filled in
 * on the block-edit page (non-null, non-empty) overwrite the corresponding attribute of the selected search-result
 * bean; everything else is left untouched. When a "Point" or "Whole" version action is requested the bean's
 * version number is incremented accordingly.
 *
 * @param VDBeanSR
 *            selected vd bean from the search result (updated in place)
 * @param vd
 *            VD_Bean holding the changed values from the block-edit page.
 *
 * @throws Exception
 */
private void InsertEditsIntoVDBeanSR(VD_Bean VDBeanSR, VD_Bean vd) throws Exception
{
    // Simple scalar attributes: copy only when the page supplied a value.
    // (The old "!s.equals(null)" checks were removed: equals(null) is always false,
    // so those conditions were redundant.)
    String sDefinition = vd.getVD_PREFERRED_DEFINITION();
    if (sDefinition != null && !sDefinition.equals(""))
        VDBeanSR.setVD_PREFERRED_DEFINITION(sDefinition);
    String sCD_ID = vd.getVD_CD_IDSEQ();
    if (sCD_ID != null && !sCD_ID.equals(""))
        VDBeanSR.setVD_CD_IDSEQ(sCD_ID);
    String sCDName = vd.getVD_CD_NAME();
    if (sCDName != null && !sCDName.equals(""))
        VDBeanSR.setVD_CD_NAME(sCDName);
    String sAslName = vd.getVD_ASL_NAME();
    if (sAslName != null && !sAslName.equals(""))
        VDBeanSR.setVD_ASL_NAME(sAslName);
    String sDtlName = vd.getVD_DATA_TYPE();
    if (sDtlName != null && !sDtlName.equals(""))
        VDBeanSR.setVD_DATA_TYPE(sDtlName);
    String sMaxLength = vd.getVD_MAX_LENGTH_NUM();
    if (sMaxLength != null && !sMaxLength.equals(""))
        VDBeanSR.setVD_MAX_LENGTH_NUM(sMaxLength);
    String sFormlName = vd.getVD_FORML_NAME(); // UOM Format
    if (sFormlName != null && !sFormlName.equals(""))
        VDBeanSR.setVD_FORML_NAME(sFormlName);
    String sUomlName = vd.getVD_UOML_NAME();
    if (sUomlName != null && !sUomlName.equals(""))
        VDBeanSR.setVD_UOML_NAME(sUomlName);
    String sLowValue = vd.getVD_LOW_VALUE_NUM();
    if (sLowValue != null && !sLowValue.equals(""))
        VDBeanSR.setVD_LOW_VALUE_NUM(sLowValue);
    String sHighValue = vd.getVD_HIGH_VALUE_NUM();
    if (sHighValue != null && !sHighValue.equals(""))
        VDBeanSR.setVD_HIGH_VALUE_NUM(sHighValue);
    String sMinLength = vd.getVD_MIN_LENGTH_NUM();
    if (sMinLength != null && !sMinLength.equals(""))
        VDBeanSR.setVD_MIN_LENGTH_NUM(sMinLength);
    String sDecimalPlace = vd.getVD_DECIMAL_PLACE();
    if (sDecimalPlace != null && !sDecimalPlace.equals(""))
        VDBeanSR.setVD_DECIMAL_PLACE(sDecimalPlace);
    String sBeginDate = vd.getVD_BEGIN_DATE();
    if (sBeginDate != null && !sBeginDate.equals(""))
        VDBeanSR.setVD_BEGIN_DATE(sBeginDate);
    String sEndDate = vd.getVD_END_DATE();
    if (sEndDate != null && !sEndDate.equals(""))
        VDBeanSR.setVD_END_DATE(sEndDate);
    String sSource = vd.getVD_SOURCE();
    if (sSource != null && !sSource.equals(""))
        VDBeanSR.setVD_SOURCE(sSource);
    String changeNote = vd.getVD_CHANGE_NOTE();
    if (changeNote != null && !changeNote.equals(""))
        VDBeanSR.setVD_CHANGE_NOTE(changeNote);
    // get cs-csi from the page into the bean for block edit
    Vector vAC_CS = vd.getAC_AC_CSI_VECTOR();
    if (vAC_CS != null)
        VDBeanSR.setAC_AC_CSI_VECTOR(vAC_CS);
    // keep only the reference documents that belong to this VD
    Vector<REF_DOC_Bean> vAC_REF_DOCS = vd.getAC_REF_DOCS();
    if (vAC_REF_DOCS != null)
    {
        String sVDIdseq = VDBeanSR.getVD_VD_IDSEQ();
        Vector<REF_DOC_Bean> temp_REF_DOCS = new Vector<REF_DOC_Bean>();
        for (REF_DOC_Bean refBean : vAC_REF_DOCS)
        {
            String sACIdseq = refBean.getAC_IDSEQ();
            // BUG FIX: the IDSEQs are Strings, so compare with equals(), not ==;
            // reference comparison silently dropped matching documents.
            if (sACIdseq == null ? sVDIdseq == null : sACIdseq.equals(sVDIdseq))
                temp_REF_DOCS.add(refBean);
        }
        VDBeanSR.setAC_REF_DOCS(temp_REF_DOCS);
    }
    String sRepTerm = vd.getVD_REP_TERM();
    if (sRepTerm != null && !sRepTerm.equals(""))
        VDBeanSR.setVD_REP_TERM(sRepTerm);
    String sRepCondr = vd.getVD_REP_CONDR_IDSEQ();
    if (sRepCondr != null && !sRepCondr.equals(""))
        VDBeanSR.setVD_REP_CONDR_IDSEQ(sRepCondr);
    String sREP_IDSEQ = vd.getVD_REP_IDSEQ();
    if (sREP_IDSEQ != null && !sREP_IDSEQ.equals(""))
        VDBeanSR.setVD_REP_IDSEQ(sREP_IDSEQ);
    /*
     * String sRepQual = vd.getVD_REP_QUAL(); if (sRepQual == null) sRepQual = ""; if (!sRepQual.equals(""))
     * VDBeanSR.setVD_REP_QUAL(sRepQual);
     */
    // version bump requested from the page: "Point" -> x.(y+1), "Whole" -> (x+1).0
    String version = vd.getVD_VERSION();
    if (version != null && (version.equals("Point") || version.equals("Whole")))
    {
        String lastVersion = VDBeanSR.getVD_VERSION();
        if (lastVersion != null)
        {
            String sNewVersion = computeNextVersion(lastVersion, version);
            if (sNewVersion != null)
                VDBeanSR.setVD_VERSION(sNewVersion);
        }
    }
}

/**
 * Computes the next version string from the current one.
 *
 * @param lastVersion
 *            the current version, e.g. "2.3"; must contain a '.' with digits on both sides.
 * @param mode
 *            "Point" increments the digit after the decimal point (carrying into the whole part past .9);
 *            "Whole" increments the whole part and resets the decimal part to 0.
 * @return the new version string, or null when lastVersion cannot be parsed (the old code threw
 *         StringIndexOutOfBoundsException on a dot-less version; returning null leaves the bean unchanged).
 */
private static String computeNextVersion(String lastVersion, String mode)
{
    int dot = lastVersion.indexOf('.');
    // defensive: an unparseable version (no dot, leading dot, or nothing after it) is left unchanged
    if (dot < 1 || dot + 1 >= lastVersion.length())
        return null;
    String wholePart = lastVersion.substring(0, dot); // digits before the dot
    int afterDigit = Integer.parseInt(lastVersion.substring(dot + 1, dot + 2)); // first digit after the dot
    if (mode.equals("Point"))
    {
        if (afterDigit != 9)
            return wholePart + "." + (afterDigit + 1);
        // ".9" rolls over into the whole part
        return (Integer.parseInt(wholePart) + 1) + ".0";
    }
    // "Whole": increment the digit just before the dot, carrying into the prefix on 9
    // (the old code only handled 1- and 2-digit whole parts; this generalizes to any width)
    int beforeDigit = Integer.parseInt(lastVersion.substring(dot - 1, dot));
    String prefix = (dot >= 2) ? lastVersion.substring(0, dot - 1) : "";
    if (beforeDigit != 9)
        return prefix + (beforeDigit + 1) + ".0";
    int prefixNum = prefix.equals("") ? 0 : Integer.parseInt(prefix);
    return (prefixNum + 1) + "0" + ".0";
}
/**
 * Creates a new Value Domain record as part of creating or editing a Data Element, then displays the result.
 * Called from 'doInsertVD' when the action originated on a DE page. Inserts the building-block attributes first,
 * then calls 'insAC.setVD' with "INS". On success the DE session bean is updated with the new VD's identity and
 * the user is returned to the originating DE page; on failure the user goes back to 'CreateVDPage.jsp' with the
 * status message describing the error.
 *
 * @param sOrigin
 *            where the VD creation started ("CreateNewVDfromEditDE" or "CreateNewVDfromCreateDE").
 *
 * @throws Exception
 */
private void doInsertVDfromDEAction(String sOrigin)
    throws Exception
{
    HttpSession httpSession = m_classReq.getSession();
    VD_Bean vdBean = (VD_Bean) httpSession.getAttribute("m_VD");
    InsACService insertService = new InsACService(m_classReq, m_classRes, this);
    // persist the building-block (rep term) attributes before the VD itself
    doInsertVDBlocks(null);
    String status = insertService.setVD("INS", vdBean, "New", null);
    if (status != null && !status.equals(""))
    {
        // insert failed: back to the create VD page so the user can correct the input
        DataManager.setAttribute(httpSession, "VDPageAction", "validate");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    // success: push the new VD's identity into the DE bean held in the session
    DE_Bean deBean = (DE_Bean) httpSession.getAttribute("m_DE");
    deBean.setDE_VD_NAME(vdBean.getVD_LONG_NAME());
    deBean.setDE_VD_IDSEQ(vdBean.getVD_VD_IDSEQ());
    // add the VD bean into the DE bean
    deBean.setDE_VD_Bean(vdBean);
    DataManager.setAttribute(httpSession, "m_DE", deBean);
    CurationServlet deServlet = (DataElementServlet) getACServlet("DataElement");
    deBean = (DE_Bean) deServlet.getACNames("new", "newVD", deBean);
    this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
    // return to whichever DE page the user came from
    boolean fromEditDE = sOrigin != null && sOrigin.equals("CreateNewVDfromEditDE");
    ForwardJSP(m_classReq, m_classRes, fromEditDE ? "/EditDEPage.jsp" : "/CreateDEPage.jsp");
}
/**
 * Creates the representation term (and its concept derivation / condr) in caDSR from the EVS concepts held in the
 * session, wiring the resulting IDSEQs into the given VD bean. When concept validity checking is enabled,
 * 'insAC.evsBeanCheck' decides whether the rep term / condr already exist, need a new version, or must be created
 * ('insAC.createCondr' / 'insAC.createEvsBean'). Always clears the 'newRepTerm' session flag on exit.
 *
 * @param VDBeanSR
 *            the VD bean to receive the rep term ids; when null the session bean 'm_VD' is used.
 *
 * @throws Exception
 */
private void doInsertVDBlocks(VD_Bean VDBeanSR) throws Exception
{
    HttpSession session = m_classReq.getSession();
    if (VDBeanSR == null)
        VDBeanSR = (VD_Bean) session.getAttribute("m_VD");
    // NOTE(review): if "m_VD" is absent from the session, VDBeanSR stays null and the else
    // branch at the bottom would throw NPE — presumably the session always holds it; confirm.
    String checkValidityRep = (String)session.getAttribute("checkValidityRep");
    if (checkValidityRep != null && checkValidityRep.equals("Yes")){
        ValidationStatusBean repStatusBean = new ValidationStatusBean();
        Vector vRepTerm = (Vector) session.getAttribute("vRepTerm");
        InsACService insAC = new InsACService(m_classReq, m_classRes, this);
        String userName = (String)session.getAttribute("Username");
        HashMap<String, String> defaultContext = (HashMap)session.getAttribute("defaultContext");
        // NOTE(review): defaultContext is dereferenced here but only null-checked below —
        // NPE if the attribute is missing from the session; verify against the login flow.
        String conteIdseq= (String)defaultContext.get("idseq");
        try {
            // ask the service whether a rep term built from these concepts already exists
            if ((vRepTerm != null && vRepTerm.size() > 0) && (defaultContext != null && defaultContext.size() > 0)) {
                repStatusBean = insAC.evsBeanCheck(vRepTerm, defaultContext, "", "Representation Term");
            }
            // set Rep if it is null
            if ((vRepTerm != null && vRepTerm.size() > 0)) {
                if (!repStatusBean.isEvsBeanExists()) {
                    if (repStatusBean.isCondrExists()) {
                        // condr exists but the rep term does not: create only the rep term
                        VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                        // Create Representation Term
                        String repIdseq = insAC.createEvsBean(userName, repStatusBean.getCondrIDSEQ(), conteIdseq, "Representation Term");
                        if (repIdseq != null && !repIdseq.equals("")) {
                            VDBeanSR.setVD_REP_IDSEQ(repIdseq);
                        }
                    } else {
                        // Create Condr
                        String condrIdseq = insAC.createCondr(vRepTerm, repStatusBean.isAllConceptsExists());
                        String repIdseq = "";
                        // Create Representation Term
                        if (condrIdseq != null && !condrIdseq.equals("")) {
                            VDBeanSR.setVD_REP_CONDR_IDSEQ(condrIdseq);
                            repIdseq = insAC.createEvsBean(userName, condrIdseq, conteIdseq, "Representation Term");
                        }
                        if (repIdseq != null && !repIdseq.equals("")) {
                            VDBeanSR.setVD_REP_IDSEQ(repIdseq);
                        }
                    }
                } else {
                    if (repStatusBean.isNewVersion()) {
                        // rep term exists but requires a new version
                        if (repStatusBean.getEvsBeanIDSEQ() != null && !repStatusBean.getEvsBeanIDSEQ().equals("")) {
                            String newID = "";
                            newID = insAC.setOC_PROP_REP_VERSION(repStatusBean.getEvsBeanIDSEQ(), "RepTerm");
                            if (newID != null && !newID.equals("")) {
                                VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                                VDBeanSR.setVD_REP_IDSEQ(newID);
                            }
                        }
                    }else{
                        // rep term exists as-is: just reference its ids
                        VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                        VDBeanSR.setVD_REP_IDSEQ(repStatusBean.getEvsBeanIDSEQ());
                    }
                }
            }
            m_classReq.setAttribute("REP_IDSEQ", repStatusBean.getEvsBeanIDSEQ());
        } catch (Exception e) {
            logger.error("ERROR in ValueDoaminServlet-doInsertVDBlocks : " + e.toString(), e);
            m_classReq.setAttribute("retcode", "Exception");
            this.storeStatusMsg("\\t Exception : Unable to update or remove Representation Term.");
        }
    }else{
        // validity checking off: expose whatever rep id the bean already carries
        m_classReq.setAttribute("REP_IDSEQ", VDBeanSR.getVD_REP_IDSEQ());
    }
    DataManager.setAttribute(session, "newRepTerm", "");
}
/**
 * Creates a new Value Domain record (new, template, or version) from the menu and displays the result. Called from
 * 'doInsertVD'. Retrieves the session bean m_VD and calls 'insAC.setVD' to insert into the database; for a new
 * version 'insAC.setAC_VERSION' runs first and the permissible values are carried over. Calls 'serAC.refreshData'
 * to refresh the search result for template/version and forwards to the search page, or back to an empty create VD
 * page for a plain new VD.
 *
 * If ret is not empty the status message holds the error and the page goes back to 'CreateVDPage.jsp' (or the
 * refreshed search page when the new-version row was already added to the results).
 *
 * @throws Exception
 */
private void doInsertVDfromMenuAction() throws Exception
{
    HttpSession session = m_classReq.getSession();
    VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
    InsACService insAC = new InsACService(m_classReq, m_classRes, this);
    GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
    String sMenuAction = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
    VD_Bean oldVDBean = (VD_Bean) session.getAttribute("oldVDBean");
    if (oldVDBean == null)
        oldVDBean = new VD_Bean();
    String ret = "";
    boolean isUpdateSuccess = true;
    // persist the building-block (rep term) attributes before the VD itself
    doInsertVDBlocks(null);
    if (sMenuAction.equals("NewVDVersion"))
    {
        // update the status message with the VD name and ID
        storeStatusMsg("Value Domain Name : " + VDBean.getVD_LONG_NAME());
        storeStatusMsg("Public ID : " + VDBean.getVD_VD_ID());
        // creates new version first
        ret = insAC.setAC_VERSION(null, null, VDBean, "ValueDomain");
        if (ret == null || ret.equals(""))
        {
            // get pvs related to this new VD, it was created in VD_Version
            // TODO serAC.doPVACSearch(VDBean.getVD_VD_IDSEQ(), VDBean.getVD_LONG_NAME(), "Version");
            PVServlet pvser = new PVServlet(m_classReq, m_classRes, this);
            pvser.searchVersionPV(VDBean, 1, "", "");
            // update non evs changes
            Vector<EVS_Bean> vParent = VDBean.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
            if (vParent != null && vParent.size() > 0)
                vParent = serAC.getNonEVSParent(vParent, VDBean, "versionSubmit");
            // get the right system name for new version; cannot use the api because parent concept is not updated
            // yet
            String prefName = VDBean.getVD_PREFERRED_NAME();
            if (prefName == null || prefName.equalsIgnoreCase("(Generated by the System)"))
            {
                VDBean = (VD_Bean) this.getSystemName(VDBean, vParent);
                VDBean.setVD_PREFERRED_NAME(VDBean.getAC_SYS_PREF_NAME());
            }
            // and updates all other attributes
            ret = insAC.setVD("UPD", VDBean, "Version", oldVDBean);
            // resetEVSBeans(req, res);
            if (ret != null && !ret.equals(""))
            {
                // add newly created row to searchresults and send it to edit page for update
                isUpdateSuccess = false;
                String oldID = oldVDBean.getVD_VD_IDSEQ();
                String newID = VDBean.getVD_VD_IDSEQ();
                String newVersion = VDBean.getVD_VERSION();
                // restore the pre-version state but keep the new id/version, flagged DRAFT MOD
                VDBean = VDBean.cloneVD_Bean(oldVDBean);
                VDBean.setVD_VD_IDSEQ(newID);
                VDBean.setVD_VERSION(newVersion);
                VDBean.setVD_ASL_NAME("DRAFT MOD");
                // refresh the result list by inserting newly created VD
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Version", oldID);
            }
        }
        else
            storeStatusMsg("\\t " + ret + " - Unable to create new version successfully.");
    }
    else
    {
        // creates new one
        ret = insAC.setVD("INS", VDBean, "New", oldVDBean); // create new one
    }
    if ((ret == null) || ret.equals(""))
    {
        this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
        DataManager.setAttribute(session, "VDPageAction", "nothing");
        DataManager.setAttribute(session, "originAction", "");
        // forwards to search page with refreshed list if template or version
        if ((sMenuAction.equals("NewVDTemplate")) || (sMenuAction.equals("NewVDVersion")))
        {
            DataManager.setAttribute(session, "searchAC", "ValueDomain");
            DataManager.setAttribute(session, "originAction", "NewVDTemplate");
            VDBean.setVD_ALIAS_NAME(VDBean.getVD_PREFERRED_NAME());
            // VDBean.setVD_TYPE_NAME("PRIMARY");
            String oldID = oldVDBean.getVD_VD_IDSEQ();
            if (sMenuAction.equals("NewVDTemplate"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Template", oldID);
            else if (sMenuAction.equals("NewVDVersion"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Version", oldID);
            ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
        }
        // forward to create vd page with empty data if new one
        else
        {
            doOpenCreateNewPages();
        }
    }
    // goes back to create/edit vd page if error
    else
    {
        DataManager.setAttribute(session, "VDPageAction", "validate");
        // forward to create or edit pages
        if (isUpdateSuccess == false)
        {
            // insert the created VD row in the results.
            String oldID = oldVDBean.getVD_VD_IDSEQ();
            if (sMenuAction.equals("NewVDTemplate"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Template", oldID);
            ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
        }
        else
            ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
}
/**
 * Opens the Create Value Domain page. Captures the current DE page values into the m_DE session bean, clears the
 * create-related session attributes, seeds a fresh VD bean (status DRAFT NEW, system-generated preferred name),
 * optionally pre-populates it from CRF question values and the property-file placeholders when coming from the
 * Questions menu, then forwards to CreateVDPage.jsp.
 *
 * @throws Exception
 */
public void doOpenCreateVDPage() throws Exception
{
    HttpSession httpSession = m_classReq.getSession();
    DE_Bean deBean = (DE_Bean) httpSession.getAttribute("m_DE");
    if (deBean == null)
        deBean = new DE_Bean();
    // capture the DE page input before leaving the DE page
    m_setAC.setDEValueFromPage(m_classReq, m_classRes, deBean);
    DataManager.setAttribute(httpSession, "m_DE", deBean);
    // clear some session attributes
    this.clearCreateSessionAttributes(m_classReq, m_classRes);
    // start from a clean VD bean
    VD_Bean vdBean = new VD_Bean();
    vdBean.setVD_ASL_NAME("DRAFT NEW");
    vdBean.setAC_PREF_NAME_TYPE("SYS");
    // pre-populate from the CRF question values when coming from the Questions menu
    String menuAction = (String) httpSession.getAttribute(Session_Data.SESSION_MENU_ACTION);
    if (menuAction.equals("Questions"))
    {
        GetACSearch acSearch = new GetACSearch(m_classReq, m_classRes, this);
        acSearch.getACQuestionValue(vdBean);
        // enumerated when any question values came back, non-enumerated otherwise
        Vector crfValues = (Vector) httpSession.getAttribute("vQuestValue");
        if (crfValues != null && crfValues.size() > 0)
            vdBean.setVD_TYPE_FLAG("E");
        else
            vdBean.setVD_TYPE_FLAG("N");
        // seed placeholder attributes from the servlet property file
        vdBean.setVD_PREFERRED_DEFINITION(NCICurationServlet.m_settings.getProperty("VDDefinition"));
        vdBean.setVD_DATA_TYPE(NCICurationServlet.m_settings.getProperty("DataType"));
        vdBean.setVD_MAX_LENGTH_NUM(NCICurationServlet.m_settings.getProperty("MaxLength"));
    }
    DataManager.setAttribute(httpSession, "m_VD", vdBean);
    // keep a pristine clone so edits can be diffed against it on save
    VD_Bean pristineVD = new VD_Bean();
    pristineVD = pristineVD.cloneVD_Bean(vdBean);
    DataManager.setAttribute(httpSession, "oldVDBean", pristineVD);
    ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
}
/**
 * Removes a building block (representation term, rep qualifier, object class, or property class) from the VD being
 * edited, driven by the 'sCompBlocks' request parameter, then re-reads the page values into the m_VD session bean.
 * For rep term / qualifier removals the remaining EVS concepts are re-resolved and the VD's rep attributes
 * refreshed.
 *
 * @throws Exception
 */
private void doRemoveBuildingBlocksVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sSelRow = "";
    VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
    if (m_VD == null)
        m_VD = new VD_Bean();
    Vector<EVS_Bean> vRepTerm = (Vector) session.getAttribute("vRepTerm");
    if (vRepTerm == null)
        vRepTerm = new Vector<EVS_Bean>();
    // which building block the user asked to remove
    String sComp = (String) m_classReq.getParameter("sCompBlocks");
    if (sComp == null)
        sComp = "";
    if (sComp.equals("RepTerm"))
    {
        // slot 0 of vRepTerm holds the primary rep term; blank it out
        EVS_Bean m_REP = new EVS_Bean();
        vRepTerm.setElementAt(m_REP, 0);
        DataManager.setAttribute(session, "vRepTerm", vRepTerm);
        m_VD.setVD_REP_NAME_PRIMARY("");
        m_VD.setVD_REP_CONCEPT_CODE("");
        m_VD.setVD_REP_EVS_CUI_ORIGEN("");
        m_VD.setVD_REP_IDSEQ("");
        DataManager.setAttribute(session, "RemoveRepBlock", "true");
        DataManager.setAttribute(session, "newRepTerm", "true");
    }
    else if (sComp.equals("RepQualifier"))
    {
        sSelRow = (String) m_classReq.getParameter("selRepQRow");
        if (sSelRow != null && !(sSelRow.equals("")))
        {
            Integer intObjRow = new Integer(sSelRow);
            int intObjRow2 = intObjRow.intValue();
            if (vRepTerm.size() > (intObjRow2 + 1))
            {
                vRepTerm.removeElementAt(intObjRow2 + 1); // add 1 so zero element not removed
                DataManager.setAttribute(session, "vRepTerm", vRepTerm);
            }
            // m_VD.setVD_REP_QUAL("");
            // drop the qualifier's name/code/db entries at the same row index
            Vector vRepQualifierNames = m_VD.getVD_REP_QUALIFIER_NAMES();
            if (vRepQualifierNames == null)
                vRepQualifierNames = new Vector();
            if (vRepQualifierNames.size() > intObjRow2)
                vRepQualifierNames.removeElementAt(intObjRow2);
            Vector vRepQualifierCodes = m_VD.getVD_REP_QUALIFIER_CODES();
            if (vRepQualifierCodes == null)
                vRepQualifierCodes = new Vector();
            if (vRepQualifierCodes.size() > intObjRow2)
                vRepQualifierCodes.removeElementAt(intObjRow2);
            Vector vRepQualifierDB = m_VD.getVD_REP_QUALIFIER_DB();
            if (vRepQualifierDB == null)
                vRepQualifierDB = new Vector();
            if (vRepQualifierDB.size() > intObjRow2)
                vRepQualifierDB.removeElementAt(intObjRow2);
            m_VD.setVD_REP_QUALIFIER_NAMES(vRepQualifierNames);
            m_VD.setVD_REP_QUALIFIER_CODES(vRepQualifierCodes);
            m_VD.setVD_REP_QUALIFIER_DB(vRepQualifierDB);
            m_VD.setVD_REP_IDSEQ("");
            DataManager.setAttribute(session, "RemoveRepBlock", "true");
            DataManager.setAttribute(session, "newRepTerm", "true");
        }
    }
    else if (sComp.equals("VDObjectClass"))
    {
        m_VD.setVD_OBJ_CLASS("");
        DataManager.setAttribute(session, "m_OC", new EVS_Bean());
    }
    else if (sComp.equals("VDPropertyClass"))
    {
        m_VD.setVD_PROP_CLASS("");
        DataManager.setAttribute(session, "m_PC", new EVS_Bean());
    }
    if (sComp.equals("RepTerm") || sComp.equals("RepQualifier")){
        // re-resolve the remaining concepts and refresh the VD's rep attributes
        vRepTerm = (Vector)session.getAttribute("vRepTerm");
        if (vRepTerm != null && vRepTerm.size() > 0){
            vRepTerm = this.getMatchingThesarusconcept(vRepTerm, "Representation Term");
            m_VD = this.updateRepAttribues(vRepTerm, m_VD);
        }
        DataManager.setAttribute(session, "vRepTerm", vRepTerm);
    }
    // re-read the remaining page values and store the bean back in the session
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
    DataManager.setAttribute(session, "m_VD", m_VD);
} // end of doRemoveQualifier
/**method to go back from vd and pv edits
* @param orgAct String value for origin where vd page was opened
* @param menuAct String value of menu action where this use case started
* @param actype String what action is expected
* @param butPress String last button pressed
* @param vdPageFrom string to check if it was PV or VD page
* @return String jsp to forward the page to
*/
public String goBackfromVD(String orgAct, String menuAct, String actype, String butPress, String vdPageFrom)
{
    try
    {
        HttpSession sess = m_classReq.getSession();
        clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
        if (vdPageFrom.equals("create"))
        {
            clearCreateSessionAttributes(m_classReq, m_classRes);
            // Templating/versioning flows return to the search results they came from.
            boolean templating = menuAct.equals("NewVDTemplate") || menuAct.equals("NewVDVersion");
            if (templating)
            {
                VD_Bean selected = (VD_Bean) sess.getAttribute(PVForm.SESSION_SELECT_VD);
                GetACSearch searcher = new GetACSearch(m_classReq, m_classRes, this);
                searcher.refreshData(m_classReq, m_classRes, null, null, selected, null, "Refresh", "");
                return "/SearchResultsPage.jsp";
            }
            // Otherwise go back to whichever DE page launched the create-VD flow.
            return orgAct.equalsIgnoreCase("CreateNewVDfromEditDE")
                ? "/EditDEPage.jsp"
                : "/CreateDEPage.jsp";
        }
        if (vdPageFrom.equals("edit"))
        {
            if (orgAct.equalsIgnoreCase("editVDfromDE"))
                return "/EditDEPage.jsp";
            // Edit flows that started from a search go back to the refreshed results.
            boolean backToSearch = menuAct.equalsIgnoreCase("editVD")
                || orgAct.equalsIgnoreCase("EditVD")
                || orgAct.equalsIgnoreCase("BlockEditVD")
                || (butPress.equals("Search") && !actype.equals("DataElement"));
            if (backToSearch)
            {
                VD_Bean selected = (VD_Bean) sess.getAttribute(PVForm.SESSION_SELECT_VD);
                if (selected == null)
                    selected = new VD_Bean();
                GetACSearch searcher = new GetACSearch(m_classReq, m_classRes, this);
                searcher.refreshData(m_classReq, m_classRes, null, null, selected, null, "Refresh", "");
                return "/SearchResultsPage.jsp";
            }
            return "/EditVDPage.jsp";
        }
    }
    catch (Exception e)
    {
        logger.error("ERROR - ", e);
    }
    // Unknown origin page: callers treat an empty string as "no forward".
    return "";
}
/** to clear the edited data from the edit and create pages
* @param orgAct String value for origin where vd page was opened
* @param menuAct String value of menu action where this use case started
* @return String jsp to forward the page to
*/
public String clearEditsOnPage(String orgAct, String menuAct)
{
    try
    {
        HttpSession session = m_classReq.getSession();
        // Discard in-page edits by reloading the VD from its last saved state.
        VD_Bean savedVD = (VD_Bean) session.getAttribute("oldVDBean");
        clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
        String vdIdseq = savedVD.getVD_VD_IDSEQ();
        Vector hits = new Vector();
        // Re-query the database so the page shows the stored attributes again.
        GetACSearch searcher = new GetACSearch(m_classReq, m_classRes, this);
        if (vdIdseq != null && !vdIdseq.equals(""))
            searcher.doVDSearch(vdIdseq, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "", "", "", "", hits, "0");
        VD_Bean reloaded;
        if (hits.size() > 0)
        {
            reloaded = (VD_Bean) hits.elementAt(0);
            reloaded = searcher.getVDAttributes(reloaded, orgAct, menuAct);
        }
        else
        {
            // Nothing stored yet (brand-new VD): reset to the draft defaults.
            reloaded = new VD_Bean();
            reloaded.setVD_ASL_NAME("DRAFT NEW");
            reloaded.setAC_PREF_NAME_TYPE("SYS");
        }
        VD_Bean cloner = new VD_Bean();
        DataManager.setAttribute(session, PVForm.SESSION_SELECT_VD, cloner.cloneVD_Bean(reloaded));
    }
    catch (Exception e)
    {
        logger.error("ERROR - ", e);
    }
    return "/CreateVDPage.jsp";
}
/**
 * Opens the read-only VD view page for the idseq supplied either as the
 * request attribute "acIdseq" or the request parameter "idseq".
 *
 * @throws Exception propagated from the search/attribute helpers
 */
public void doOpenViewPage() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // BUG FIX: getAttribute returns null when "acIdseq" is absent, so the old
    // unconditional acID.equals("") threw an NPE; guard for null first.
    String acID = (String) m_classReq.getAttribute("acIdseq");
    if (acID == null || acID.equals(""))
        acID = m_classReq.getParameter("idseq");
    Vector<VD_Bean> vList = new Vector<VD_Bean>();
    // get VD's attributes from the database again
    GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
    if (acID != null && !acID.equals(""))
    {
        serAC.doVDSearch(acID, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "",
            "", "", "", vList, "0");
    }
    if (vList.size() > 0) // found: load full attributes and stage the view page
    {
        VD_Bean VDBean = (VD_Bean) vList.elementAt(0);
        VDBean = serAC.getVDAttributes(VDBean, "openView", "viewVD");
        DataManager.setAttribute(session, "TabFocus", "VD");
        m_classReq.setAttribute("viewVDId", VDBean.getIDSEQ());
        // The bean is cached per-idseq so multiple view windows can coexist.
        String viewVD = "viewVD" + VDBean.getIDSEQ();
        DataManager.setAttribute(session, viewVD, VDBean);
        String title = "CDE Curation View VD "+VDBean.getVD_LONG_NAME()+ " [" + VDBean.getVD_VD_ID() + "v" + VDBean.getVD_VERSION() +"]";
        m_classReq.setAttribute("title", title);
        m_classReq.setAttribute("publicID", VDBean.getVD_VD_ID());
        m_classReq.setAttribute("version", VDBean.getVD_VERSION());
        m_classReq.setAttribute("IncludeViewPage", "EditVD.jsp") ;
    }
}
/**
 * Switches between the VD-details and PV tabs of the read-only view page,
 * restoring the cached bean ("viewVD" + id) and re-staging the page title.
 *
 * @throws Exception propagated from ForwardJSP
 */
public void doViewPageTab() throws Exception{
    String tab = m_classReq.getParameter("vdpvstab");
    String from = m_classReq.getParameter("from");
    String id = m_classReq.getParameter("id");
    String viewVD = "viewVD" + id;
    HttpSession session = m_classReq.getSession();
    // NOTE(review): assumes doOpenViewPage already cached the bean for this id;
    // a missing attribute would NPE below — behavior unchanged from the original.
    VD_Bean VDBean = (VD_Bean)session.getAttribute(viewVD);
    m_classReq.setAttribute("viewVDId", id);
    // (Removed two locals, publicId/version, that were assigned but never used.)
    String title = "CDE Curation View VD "+VDBean.getVD_LONG_NAME()+ " [" + VDBean.getVD_VD_ID() + "v" + VDBean.getVD_VERSION() +"]";
    m_classReq.setAttribute("title", title);
    m_classReq.setAttribute("publicID", VDBean.getVD_VD_ID());
    m_classReq.setAttribute("version", VDBean.getVD_VERSION());
    DataManager.setAttribute(session, "VDAction", "");
    if (from.equals("edit")){
        m_classReq.getSession().setAttribute("displayErrorMessage", "Yes");
    }
    if (tab != null && tab.equals("PV")) {
        DataManager.setAttribute(session, "TabFocus", "PV");
        m_classReq.setAttribute("IncludeViewPage", "PermissibleValue.jsp") ;
        ForwardJSP(m_classReq, m_classRes, "/ViewPage.jsp");
    }else{
        DataManager.setAttribute(session, "TabFocus", "VD");
        m_classReq.setAttribute("IncludeViewPage", "EditVD.jsp") ;
        ForwardJSP(m_classReq, m_classRes, "/ViewPage.jsp");
    }
}
/**
 * Copies the primary representation term (element 0 of vRep) and its
 * qualifiers (elements 1..n) onto the VD bean, keeping three parallel
 * vectors (names/codes/databases) in sync and updating session state.
 *
 * @param vRep   vector of EVS_Bean: primary rep term first, qualifiers after
 * @param vdBean VD bean to update in place
 * @return the same vdBean, updated
 */
private VD_Bean updateRepAttribues(Vector vRep, VD_Bean vdBean) {
    HttpSession session = m_classReq.getSession();
    // Primary rep term: copy its attributes onto the VD.
    EVS_Bean pBean =(EVS_Bean)vRep.get(0);
    String nvpValue = "";
    if (checkNVP(pBean))
        nvpValue="::"+pBean.getNVP_CONCEPT_VALUE();
    if (pBean.getLONG_NAME() != null)
        vdBean.setVD_REP_NAME_PRIMARY(pBean.getLONG_NAME()+nvpValue);
    vdBean.setVD_REP_CONCEPT_CODE(pBean.getCONCEPT_IDENTIFIER());
    vdBean.setVD_REP_EVS_CUI_ORIGEN(pBean.getEVS_DATABASE());
    vdBean.setVD_REP_IDSEQ(pBean.getIDSEQ());
    DataManager.setAttribute(session, "m_REP", pBean);
    // Reset the qualifier vectors; they stay null when there are no qualifiers.
    vdBean.setVD_REP_QUALIFIER_NAMES(null);
    vdBean.setVD_REP_QUALIFIER_CODES(null);
    vdBean.setVD_REP_QUALIFIER_DB(null);
    // Build the three parallel vectors once, instead of re-reading and
    // null-checking them from the bean on every loop pass (same net result).
    Vector<String> vRepQualifierNames = new Vector<String>();
    Vector<String> vRepQualifierCodes = new Vector<String>();
    Vector<String> vRepQualifierDB = new Vector<String>();
    for (int i=1; i<vRep.size();i++){
        EVS_Bean eBean =(EVS_Bean)vRep.get(i);
        nvpValue = "";
        if (checkNVP(eBean))
            nvpValue="::"+eBean.getNVP_CONCEPT_VALUE();
        vRepQualifierNames.addElement(eBean.getLONG_NAME()+nvpValue);
        vRepQualifierCodes.addElement(eBean.getCONCEPT_IDENTIFIER());
        vRepQualifierDB.addElement(eBean.getEVS_DATABASE());
        // Setters stay inside the loop to preserve the original contract:
        // the vectors are attached only when at least one qualifier exists.
        vdBean.setVD_REP_QUALIFIER_NAMES(vRepQualifierNames);
        vdBean.setVD_REP_QUALIFIER_CODES(vRepQualifierCodes);
        vdBean.setVD_REP_QUALIFIER_DB(vRepQualifierDB);
        DataManager.setAttribute(session, "vRepQResult", null);
        DataManager.setAttribute(session, "m_REPQ", eBean);
    }
    return vdBean;
}
/**
 * Reports whether the concept carries a usable name-value pair: the NVP
 * indicator is set, the long name does not already embed a "::" separator,
 * and a non-empty NVP value exists. Guard order preserves the original
 * short-circuit evaluation.
 */
public boolean checkNVP(EVS_Bean eCon) {
    if (eCon.getNAME_VALUE_PAIR_IND() <= 0)
        return false;
    if (eCon.getLONG_NAME().indexOf("::") >= 1)
        return false;
    return eCon.getNVP_CONCEPT_VALUE().length() > 0;
}
}
|
src/gov/nih/nci/cadsr/cdecurate/tool/ValueDomainServlet.java
|
package gov.nih.nci.cadsr.cdecurate.tool;
import java.util.HashMap;
import java.util.Vector;
import gov.nih.nci.cadsr.cdecurate.ui.AltNamesDefsSession;
import gov.nih.nci.cadsr.cdecurate.util.DataManager;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class ValueDomainServlet extends CurationServlet {
/** Default constructor, required for container-managed servlet instantiation. */
public ValueDomainServlet() {
}
/**
 * Constructs a servlet helper bound to one request/response cycle.
 *
 * @param req current HTTP request
 * @param res current HTTP response
 * @param sc  servlet context, passed through to the superclass
 */
public ValueDomainServlet(HttpServletRequest req, HttpServletResponse res,
        ServletContext sc) {
    super(req, res, sc);
}
/**
 * Dispatches an incoming request to the matching Value Domain handler.
 * Unrecognized request types fall through with no action.
 *
 * @param reqType the parsed request type
 * @throws Exception propagated from the individual handlers
 */
public void execute(ACRequestTypes reqType) throws Exception {
    switch (reqType){
    case newVDFromMenu:        // "Create VD" chosen from the main menu
        doOpenCreateNewPages();
        break;
    case newVDfromForm:        // postback from the Create VD page
        doCreateVDActions();
        break;
    case editVD:               // postback from the Edit VD page
        doEditVDActions();
        break;
    case createNewVD:
        doOpenCreateVDPage();
        break;
    case validateVDFromForm:
        doInsertVD();
        break;
    case viewVALUEDOMAIN:      // open the read-only view page
        doOpenViewPage();
        break;
    case viewVDPVSTab:         // switch tabs on the view page
        doViewPageTab();
        break;
    }
}
/**
* The doOpenCreateNewPages method will set some session attributes then forward the request to a Create page.
* Called from 'service' method where reqType is 'newDEFromMenu', 'newDECFromMenu', 'newVDFromMenu' Sets some
* initial session attributes. Calls 'getAC.getACList' to get the Data list from the database for the selected
* context. Sets session Bean and forwards the create page for the selected component.
* @throws Exception
*/
private void doOpenCreateNewPages() throws Exception
{
    HttpSession session = m_classReq.getSession();
    clearSessionAttributes(m_classReq, m_classRes);
    this.clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
    // Preserve the login-time default context across the attribute reset.
    String context = (String) session.getAttribute("sDefaultContext"); // from Login.jsp
    DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, "nothing");
    DataManager.setAttribute(session, "DDEAction", "nothing"); // reset from "CreateNewDEFComp"
    DataManager.setAttribute(session, "sCDEAction", "nothing");
    DataManager.setAttribute(session, "VDPageAction", "nothing");
    DataManager.setAttribute(session, "DECPageAction", "nothing");
    DataManager.setAttribute(session, "sDefaultContext", context);
    this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
    DataManager.setAttribute(session, "originAction", "NewVDFromMenu");
    DataManager.setAttribute(session, "LastMenuButtonPressed", "CreateVD");
    // Seed both the working VD bean and its pristine "old" copy with draft defaults.
    VD_Bean freshVD = new VD_Bean();
    freshVD.setVD_ASL_NAME("DRAFT NEW");
    freshVD.setAC_PREF_NAME_TYPE("SYS");
    DataManager.setAttribute(session, "m_VD", freshVD);
    VD_Bean priorVD = new VD_Bean();
    priorVD.setVD_ASL_NAME("DRAFT NEW");
    priorVD.setAC_PREF_NAME_TYPE("SYS");
    DataManager.setAttribute(session, "oldVDBean", priorVD);
    // Reset every EVS building-block bean to an empty placeholder.
    String[] evsKeys = { "m_OC", "m_PC", "m_REP", "m_OCQ", "m_PCQ", "m_REPQ" };
    for (String key : evsKeys)
        DataManager.setAttribute(session, key, new EVS_Bean());
    ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
} // end of doOpenCreateNewPages
/**
* The doCreateVDActions method handles CreateVD or EditVD actions of the request. Called from 'service' method
* where reqType is 'newVDfromForm' Calls 'doValidateVD' if the action is Validate or submit. Calls 'doSuggestionDE'
* if the action is open EVS Window.
*
* @throws Exception
*/
/**
 * Routes every postback from the Create VD page by its "pageAction"
 * parameter: validation, submission, PV maintenance, building-block
 * selection/removal, tab switching, contacts, and navigation back.
 *
 * @throws Exception propagated from the individual action handlers
 */
private void doCreateVDActions() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sMenuAction = (String) m_classReq.getParameter("MenuAction");
    if (sMenuAction != null)
        DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, sMenuAction);
    String sAction = (String) m_classReq.getParameter("pageAction");
    if (sAction ==null ) sAction ="";
    DataManager.setAttribute(session, "VDPageAction", sAction); // store the page action in attribute
    String sSubAction = (String) m_classReq.getParameter("VDAction");
    DataManager.setAttribute(session, "VDAction", sSubAction);
    String sOriginAction = (String) session.getAttribute("originAction");
    //System.out.println("create vd " + sAction);
    /* if (sAction.equals("changeContext"))
    doChangeContext(req, res, "vd");
    else */if (sAction.equals("validate"))
    {
        // Validate page input and show the validation results page.
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
    }
    else if (sAction.equals("submit"))
        doSubmitVD();
    else if (sAction.equals("createPV") || sAction.equals("editPV") || sAction.equals("removePV"))
        doOpenCreatePVPage(m_classReq, m_classRes, sAction, "createVD");
    else if (sAction.equals("removePVandParent") || sAction.equals("removeParent"))
        doRemoveParent(sAction, "createVD");
    // else if (sAction.equals("searchPV"))
    // doSearchPV(m_classReq, m_classRes);
    else if (sAction.equals("createVM"))
        doOpenCreateVMPage(m_classReq, m_classRes, "vd");
    else if (sAction.equals("Enum") || sAction.equals("NonEnum"))
    {
        // User toggled the VD type (enumerated vs. non-enumerated).
        doSetVDPage("Create");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    else if (sAction.equals("clearBoxes"))
    {
        // Discard page edits; the returned forward is ignored here by design.
        String ret = clearEditsOnPage(sOriginAction, sMenuAction); // , "vdEdits");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    /*
     * else if (sAction.equals("refreshCreateVD")) { doSelectParentVD(req, res); ForwardJSP(req, res,
     * "/CreateVDPage.jsp"); return; }
     */else if (sAction.equals("UseSelection"))
    {
        // Templating/versioning appends to the existing name; otherwise start fresh.
        String nameAction = "newName";
        if (sMenuAction.equals("NewVDTemplate") || sMenuAction.equals("NewVDVersion"))
            nameAction = "appendName";
        doVDUseSelection(nameAction);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("RemoveSelection"))
    {
        doRemoveBuildingBlocksVD();
        // re work on the naming if new one
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        EVS_Bean nullEVS = null;
        if (!sMenuAction.equals("NewVDTemplate") && !sMenuAction.equals("NewVDVersion"))
            vd = (VD_Bean) this.getACNames(nullEVS, "Search", vd); // change only abbr pref name
        else
            vd = (VD_Bean) this.getACNames(nullEVS, "Remove", vd); // need to change the long name & def also
        DataManager.setAttribute(session, "m_VD", vd);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("changeNameType"))
    {
        this.doChangeVDNameType();
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    /*
     * else if (sAction.equals("CreateNonEVSRef")) { doNonEVSReference(req, res); ForwardJSP(req, res,
     * "/CreateVDPage.jsp"); }
     */else if (sAction.equals("addSelectedCon"))
    {
        doSelectVMConcept(m_classReq, m_classRes, sAction);
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    else if (sAction.equals("sortPV"))
    {
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        String sField = (String) m_classReq.getParameter("pvSortColumn");
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        serAC.getVDPVSortedRows(vd,sField,"create",""); // call the method to sort pv attribute
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    else if (sAction.equals("Store Alternate Names") || sAction.equals("Store Reference Documents"))
        this.doMarkACBeanForAltRef(m_classReq, m_classRes, "ValueDomain", sAction, "createAC");
    // add/edit or remove contacts
    else if (sAction.equals("doContactUpd") || sAction.equals("removeContact"))
    {
        VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
        // capture all page attributes
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, VDBean);
        VDBean.setAC_CONTACTS(this.doContactACUpdates(m_classReq, sAction));
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
    // open the DE page or search page with
    else if (sAction.equals("goBack"))
    {
        String sFor = goBackfromVD(sOriginAction, sMenuAction, "", "", "create");
        ForwardJSP(m_classReq, m_classRes, sFor);
    }
    else if (sAction.equals("vdpvstab"))
    {
        // Switch to the Permissible Values tab, re-validating first.
        DataManager.setAttribute(session, "TabFocus", "PV");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
    }
    else if (sAction.equals("vddetailstab"))
    {
        // Switch back to the VD details tab, re-validating first.
        DataManager.setAttribute(session, "TabFocus", "VD");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
}
/**
* The doEditVDActions method handles EditVD actions of the request. Called from 'service' method where reqType is
* 'editVD'. Calls 'doValidateVD' if the action is validate or submit. Calls 'doSuggestionDE' if the action is open EVS
* Window.
*
* @throws Exception
*/
/**
 * Routes every postback from the Edit VD page by its "pageAction"
 * parameter; mirrors doCreateVDActions but forwards to the edit-flow JSPs
 * and supports block-edit validation.
 *
 * @throws Exception propagated from the individual action handlers
 */
private void doEditVDActions() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sMenuAction = (String) m_classReq.getParameter("MenuAction");
    if (sMenuAction != null)
        DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, sMenuAction);
    String sAction = (String) m_classReq.getParameter("pageAction");
    if (sAction ==null ) sAction ="";
    DataManager.setAttribute(session, "VDPageAction", sAction); // store the page action in attribute
    String sSubAction = (String) m_classReq.getParameter("VDAction");
    DataManager.setAttribute(session, "VDAction", sSubAction);
    String sButtonPressed = (String) session.getAttribute("LastMenuButtonPressed");
    String sSearchAC = (String) session.getAttribute("SearchAC");
    if (sSearchAC == null)
        sSearchAC = "";
    String sOriginAction = (String) session.getAttribute("originAction");
    if (sAction.equals("submit"))
        doSubmitVD();
    else if (sAction.equals("validate") && sOriginAction.equals("BlockEditVD"))
        doValidateVDBlockEdit();   // block edit has its own validation path
    else if (sAction.equals("validate"))
    {
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
    }
    else if (sAction.equals("suggestion"))
        doSuggestionDE(m_classReq, m_classRes);
    else if (sAction.equals("UseSelection"))
    {
        // In block edit, naming follows the block convention; otherwise append.
        String nameAction = "appendName";
        if (sOriginAction.equals("BlockEditVD"))
            nameAction = "blockName";
        doVDUseSelection(nameAction);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("RemoveSelection"))
    {
        doRemoveBuildingBlocksVD();
        // re work on the naming if new one
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        EVS_Bean nullEVS = null;
        vd = (VD_Bean) this.getACNames(nullEVS, "Remove", vd); // change only abbr pref name
        DataManager.setAttribute(session, "m_VD", vd);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("changeNameType"))
    {
        this.doChangeVDNameType();
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("sortPV"))
    {
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        String sField = (String) m_classReq.getParameter("pvSortColumn");
        VD_Bean vd = (VD_Bean) session.getAttribute("m_VD");
        serAC.getVDPVSortedRows(vd,sField,"edit",""); // call the method to sort pv attribute
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
        return;
    }
    else if (sAction.equals("createPV") || sAction.equals("editPV") || sAction.equals("removePV"))
        doOpenCreatePVPage(m_classReq, m_classRes, sAction, "editVD");
    else if (sAction.equals("removePVandParent") || sAction.equals("removeParent"))
        doRemoveParent(sAction, "editVD");
    else if (sAction.equals("addSelectedCon"))
    {
        doSelectVMConcept(m_classReq, m_classRes, sAction);
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("Enum") || sAction.equals("NonEnum"))
    {
        // User toggled the VD type (enumerated vs. non-enumerated).
        doSetVDPage("Edit");
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("Store Alternate Names") || sAction.equals("Store Reference Documents"))
        this.doMarkACBeanForAltRef(m_classReq, m_classRes, "ValueDomain", sAction, "editAC");
    // add/edit or remove contacts
    else if (sAction.equals("doContactUpd") || sAction.equals("removeContact"))
    {
        VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
        // capture all page attributes
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, VDBean);
        VDBean.setAC_CONTACTS(this.doContactACUpdates(m_classReq, sAction));
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    else if (sAction.equals("clearBoxes"))
    {
        // Discard page edits; the returned forward is ignored here by design.
        String ret = clearEditsOnPage(sOriginAction, sMenuAction); // , "vdEdits");
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
    // open the Edit DE page or search page with
    else if (sAction.equals("goBack"))
    {
        String sFor = goBackfromVD(sOriginAction, sMenuAction, sSearchAC, sButtonPressed, "edit");
        ForwardJSP(m_classReq, m_classRes, sFor);
    }
    else if (sAction.equals("vdpvstab"))
    {
        // Switch to the Permissible Values tab, re-validating first.
        DataManager.setAttribute(session, "TabFocus", "PV");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
    }
    else if (sAction.equals("vddetailstab"))
    {
        // Switch back to the VD details tab, re-validating first.
        DataManager.setAttribute(session, "TabFocus", "VD");
        doValidateVD();
        ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
    }
}
/**
* changes the dec name type as selected
*
* @param sOrigin
* string of origin action of the ac
* @throws java.lang.Exception
*/
private void doChangeVDNameType() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // Capture the current page state into the VD bean before switching types.
    VD_Bean pageVD = (VD_Bean) session.getAttribute("m_VD");
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, pageVD);
    String sysName = pageVD.getAC_SYS_PREF_NAME();
    String abbrName = pageVD.getAC_ABBR_PREF_NAME();
    String userName = pageVD.getAC_USER_PREF_NAME();
    String nameType = (String) m_classReq.getParameter("rNameConv");
    if (nameType == null || nameType.equals(""))
        nameType = "SYS"; // default
    // Preserve a hand-typed preferred name as the USER name so it survives
    // switching away and back.
    String typedName = (String) m_classReq.getParameter("txtPreferredName");
    boolean keepTyped = typedName != null && !typedName.equals("")
        && !typedName.equals("(Generated by the System)")
        && !typedName.equals(sysName) && !typedName.equals(abbrName);
    if (keepTyped)
        pageVD.setAC_USER_PREF_NAME(typedName);
    // Apply the chosen convention to the preferred name.
    if (nameType.equals("SYS"))
    {
        String generated = (sysName == null) ? "" : sysName;
        // System names are limited to the trailing 30 characters.
        if (generated.length() > 30)
            generated = generated.substring(generated.length() - 30);
        pageVD.setVD_PREFERRED_NAME(generated);
    }
    else if (nameType.equals("ABBR"))
        pageVD.setVD_PREFERRED_NAME(abbrName);
    else if (nameType.equals("USER"))
        pageVD.setVD_PREFERRED_NAME(userName);
    pageVD.setAC_PREF_NAME_TYPE(nameType); // remember the convention on the bean
    DataManager.setAttribute(session, "m_VD", pageVD);
}
/**
* Does open editVD page action from DE page called from 'doEditDEActions' method. Calls
* 'm_setAC.setDEValueFromPage' to store the DE bean for later use Using the VD idseq, calls 'SerAC.search_VD'
* method to gets dec attributes to populate. stores VD bean in session and opens editVD page. goes back to editDE
* page if any error.
*
* @throws Exception
*/
/**
 * Opens the Edit VD page for the VD selected on the DE page ("selVD"),
 * after saving the DE page state and checking the user's write permission
 * in the VD's context. Falls back to the DE pages with a status message on
 * missing id, missing permission, or a failed lookup.
 *
 * @throws Exception propagated from search/permission helpers
 */
public void doOpenEditVDPage() throws Exception
{
    HttpSession session = m_classReq.getSession();
    DE_Bean m_DE = (DE_Bean) session.getAttribute("m_DE");
    if (m_DE == null)
        m_DE = new DE_Bean();
    // store the de values in the session
    m_setAC.setDEValueFromPage(m_classReq, m_classRes, m_DE);
    DataManager.setAttribute(session, "m_DE", m_DE);
    this.clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
    String sVDID = null;
    String sVDid[] = m_classReq.getParameterValues("selVD");
    if (sVDid != null)
        sVDID = sVDid[0];   // only the first selection is honored
    // get the dec bean for this id
    if (sVDID != null)
    {
        Vector vList = new Vector();
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        serAC.doVDSearch(sVDID, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "", "",
            "", "", vList, "0");
        // forward editVD page with this bean
        if (vList.size() > 0)
        {
            // NOTE: the unconditional break below means only element 0 is ever processed.
            for (int i = 0; i < vList.size(); i++)
            {
                VD_Bean VDBean = new VD_Bean();
                VDBean = (VD_Bean) vList.elementAt(i);
                // check if the user has write permission
                String contID = VDBean.getVD_CONTE_IDSEQ();
                String sUser = (String) session.getAttribute("Username");
                GetACService getAC = new GetACService(m_classReq, m_classRes, this);
                String hasPermit = getAC.hasPrivilege("Create", sUser, "vd", contID);
                // forward to editVD if has write permission
                if (hasPermit.equals("Yes"))
                {
                    String sMenuAction = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
                    VDBean = serAC.getVDAttributes(VDBean, "Edit", sMenuAction); // get VD other Attributes
                    DataManager.setAttribute(session, "m_VD", VDBean);
                    // Keep a pristine clone so "clear edits" can restore the saved state.
                    VD_Bean oldVD = new VD_Bean();
                    oldVD = oldVD.cloneVD_Bean(VDBean);
                    DataManager.setAttribute(session, "oldVDBean", oldVD);
                    // DataManager.setAttribute(session, "oldVDBean", VDBean);
                    ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp"); // forward to editVD page
                }
                // go back to editDE with message if no permission
                else
                {
                    DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, "No edit permission in "
                        + VDBean.getVD_CONTEXT_NAME() + " context");
                    ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
                }
                break;
            }
        }
        // display error message and back to edit DE page
        else
        {
            DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE,
                "Unable to get Existing VD attributes from the database");
            ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
        }
    }
    // display error message and back to editDE page
    else
    {
        DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, "Unable to get the VDid from the page");
        ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp"); // forward to editDE page
    }
}// end doOpenEditVDPage
/**
* Called from doCreateVDActions. Calls 'setAC.setVDValueFromPage' to set the VD data from the page. Calls
* 'setAC.setValidatePageValuesVD' to validate the data. Loops through the vector vValidate to check if everything
* is valid and Calls 'doInsertVD' to insert the data. If vector contains invalid fields, forwards to validation
* page
*
* @throws Exception
*/
private void doSubmitVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    DataManager.setAttribute(session, "sVDAction", "validate");
    // Capture the page into a fresh bean and validate it against the
    // building-block beans already staged in the session.
    VD_Bean pageVD = new VD_Bean();
    GetACService getAC = new GetACService(m_classReq, m_classRes, this);
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, pageVD);
    EVS_Bean ocBean = (EVS_Bean) session.getAttribute("m_OC");
    EVS_Bean pcBean = (EVS_Bean) session.getAttribute("m_PC");
    EVS_Bean ocqBean = (EVS_Bean) session.getAttribute("m_OCQ");
    EVS_Bean pcqBean = (EVS_Bean) session.getAttribute("m_PCQ");
    EVS_Bean repBean = (EVS_Bean) session.getAttribute("m_REP");
    EVS_Bean repqBean = (EVS_Bean) session.getAttribute("m_REPQ");
    m_setAC.setValidatePageValuesVD(m_classReq, m_classRes, pageVD, ocBean, pcBean, repBean, ocqBean, pcqBean, repqBean, getAC);
    DataManager.setAttribute(session, "m_VD", pageVD);
    // Validation results arrive as (field, value, status) triples; every
    // status must be "Valid" for the submit to proceed.
    Vector vValidate = (Vector) m_classReq.getAttribute("vValidate");
    boolean allValid = (vValidate != null);
    if (vValidate != null)
    {
        for (int i = 0; vValidate.size() > i; i = i + 3)
        {
            String sStat = (String) vValidate.elementAt(i + 2);
            if (!sStat.equals("Valid"))
                allValid = false;
        }
    }
    if (allValid)
        doInsertVD();
    else
        ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
} // end of doSubmitVD
/**
* The doValidateVD method gets the values from page the user filled out, validates the input, then forwards results
* to the Validate page Called from 'doCreateVDActions', 'doSubmitVD' method. Calls 'setAC.setVDValueFromPage' to
* set the data from the page to the bean. Calls 'setAC.setValidatePageValuesVD' to validate the data. Stores 'm_VD'
* bean in session. Forwards the page 'ValidateVDPage.jsp' with validation vector to display.
*
* @throws Exception
*/
/**
 * Captures the current VD page values into the session bean and runs
 * validation against the staged building-block beans. Also decides whether
 * the representation term needs re-validation (skipped when its idseq is
 * unchanged from the previously saved one) and records that decision in
 * the "checkValidityRep" session flag.
 *
 * @throws Exception propagated from the capture/validation helpers
 */
private void doValidateVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String oldRepIdseq = (String)session.getAttribute("oldRepIdseq");
    String checkValidityRep = "Yes";
    String sAction = (String) m_classReq.getParameter("pageAction");
    if (sAction == null)
        sAction = "";
    String sOriginAction = (String) session.getAttribute("originAction");
    // do below for versioning to check whether these two have changed
    VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
    EVS_Bean m_OC = new EVS_Bean();
    EVS_Bean m_PC = new EVS_Bean();
    EVS_Bean m_REP = new EVS_Bean();
    EVS_Bean m_OCQ = new EVS_Bean();
    EVS_Bean m_PCQ = new EVS_Bean();
    EVS_Bean m_REPQ = new EVS_Bean();
    GetACService getAC = new GetACService(m_classReq, m_classRes, this);
    DataManager.setAttribute(session, "VDPageAction", "validate"); // store the page action in attribute
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
    // Skip rep-term re-validation when editing and the rep idseq is unchanged.
    if (sOriginAction!= null && !sOriginAction.equals("NewVDFromMenu")){
        if (m_VD.getVD_REP_IDSEQ() != null && !m_VD.getVD_REP_IDSEQ().equals("") && m_VD.getVD_REP_IDSEQ().equals(oldRepIdseq)){
            checkValidityRep = "No";
        }
    }
    DataManager.setAttribute(session, "checkValidityRep", checkValidityRep);
    // Replace the placeholder beans with the staged session building blocks.
    m_OC = (EVS_Bean) session.getAttribute("m_OC");
    m_PC = (EVS_Bean) session.getAttribute("m_PC");
    m_OCQ = (EVS_Bean) session.getAttribute("m_OCQ");
    m_PCQ = (EVS_Bean) session.getAttribute("m_PCQ");
    m_REP = (EVS_Bean) session.getAttribute("m_REP");
    m_REPQ = (EVS_Bean) session.getAttribute("m_REPQ");
    m_setAC.setValidatePageValuesVD(m_classReq, m_classRes, m_VD, m_OC, m_PC, m_REP, m_OCQ, m_PCQ, m_REPQ, getAC);
    DataManager.setAttribute(session, "m_VD", m_VD);
    /*
     * if(sAction.equals("Enum") || sAction.equals("NonEnum") || sAction.equals("EnumByRef")) ForwardJSP(m_classReq, m_classRes,
     * "/CreateVDPage.jsp"); else if (!sAction.equals("vdpvstab") && !sAction.equals("vddetailstab"))
     * ForwardJSP(req, res, "/ValidateVDPage.jsp");
     */} // end of doValidateVD
/**
* The doSetVDPage method gets the values from page the user filled out, Calls 'setAC.setVDValueFromPage' to set the
* data from the page to the bean. Stores 'm_VD' bean in session. Forwards the page 'CreateVDPage.jsp' with
* validation vector to display.
*
* @param sOrigin
* origin where it is called from
*
* @throws Exception
*/
private void doSetVDPage(String sOrigin) throws Exception
{
// Reads the posted form into the session VD bean, then reconciles the
// permissible-value list with the selected VD type (Enum vs. non-Enum).
try
{
HttpSession session = m_classReq.getSession();
String sAction = (String) m_classReq.getParameter("pageAction");
if (sAction == null)
sAction = "";
// do below for versioning to check whether these two have changed
VD_Bean vdBean = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
m_setAC.setVDValueFromPage(m_classReq, m_classRes, vdBean);
// check if pvs are used in the form when type is changed to non enumerated.
if (!sAction.equals("Enum"))
{
// get vdid from the bean
// VD_Bean vdBean = (VD_Bean)session.getAttribute("m_VD");
String sVDid = vdBean.getVD_VD_IDSEQ();
boolean isExist = false;
if (sOrigin.equals("Edit"))
{
// call function to check if relationship exists
SetACService setAC = new SetACService(this);
isExist = setAC.checkPVQCExists(m_classReq, m_classRes, sVDid, "");
if (isExist)
{
// a PV of this VD is referenced by a Case Report Form question:
// refuse the type change and force the flag back to Enumerated
String sMsg = "Unable to change Value Domain type to Non-Enumerated "
+ "because one or more Permissible Values are being used in a Case Report Form. \\n"
+ "Please create a new version of this Value Domain to change the type to Non-Enumerated.";
DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, sMsg);
vdBean.setVD_TYPE_FLAG("E");
DataManager.setAttribute(session, "m_VD", vdBean);
}
}
// mark all the pvs as deleted to remove them while submitting.
if (!isExist)
{
Vector<PV_Bean> vVDPVs = vdBean.getVD_PV_List(); // (Vector)session.getAttribute("VDPVList");
if (vVDPVs != null)
{
// move every PV to the removed list so submit deletes them
Vector<PV_Bean> vRemVDPV = vdBean.getRemoved_VDPVList();
if (vRemVDPV == null)
vRemVDPV = new Vector<PV_Bean>();
for (int i = 0; i < vVDPVs.size(); i++)
{
PV_Bean pvBean = (PV_Bean) vVDPVs.elementAt(i);
vRemVDPV.addElement(pvBean);
}
vdBean.setRemoved_VDPVList(vRemVDPV);
vdBean.setVD_PV_List(new Vector<PV_Bean>());
}
}
}
else
{
// type is Enumerated: remove Meta parents since they are not needed for enum types
Vector<EVS_Bean> vParentCon = vdBean.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
if (vParentCon == null)
vParentCon = new Vector<EVS_Bean>();
for (int i = 0; i < vParentCon.size(); i++)
{
EVS_Bean ePar = (EVS_Bean) vParentCon.elementAt(i);
if (ePar == null)
ePar = new EVS_Bean();
String parDB = ePar.getEVS_DATABASE();
// System.out.println(i + " setvdpage " + parDB);
if (parDB != null && parDB.equals("NCI Metathesaurus"))
{
// flag Metathesaurus parents for deletion on submit
ePar.setCON_AC_SUBMIT_ACTION("DEL");
vParentCon.setElementAt(ePar, i);
}
}
vdBean.setReferenceConceptList(vParentCon);
DataManager.setAttribute(session, "m_VD", vdBean);
// get back pvs associated with this vd from the pre-edit snapshot
VD_Bean oldVD = (VD_Bean) session.getAttribute("oldVDBean");
if (oldVD == null)
oldVD = new VD_Bean();
if (oldVD.getVD_TYPE_FLAG() != null && oldVD.getVD_TYPE_FLAG().equals("E"))
{
if (oldVD.getVD_VD_IDSEQ() != null && !oldVD.getVD_VD_IDSEQ().equals(""))
{
// String pvAct = "Search";
String sMenu = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
// if (sMenu.equals("NewVDTemplate"))
// pvAct = "NewUsing";
// Integer pvCount = new Integer(0);
// restore the PV list (deep copy) and clear any pending removals
vdBean.setVD_PV_List(oldVD.cloneVDPVVector(oldVD.getVD_PV_List()));
vdBean.setRemoved_VDPVList(new Vector<PV_Bean>());
GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
if (sMenu.equals("Questions"))
serAC.getACQuestionValue(vdBean);
}
}
}
DataManager.setAttribute(session, "m_VD", vdBean);
}
catch (Exception e)
{
logger.error("Error - doSetVDPage " + e.toString(), e);
}
} // end of doSetVDPage
/**
* makes the vd's system generated name
*
* @param vd
* current vd bean
* @param vParent
* vector of selected parents
* @return modified vd bean
*/
public AC_Bean getSystemName(AC_Bean ac, Vector<EVS_Bean> vParent)
{
    VD_Bean vd = (VD_Bean) ac;
    try
    {
        // Build the system-generated name: concept codes (last parent first),
        // then the VD public id and version, truncated to 30 characters.
        StringBuilder name = new StringBuilder();
        for (int idx = vParent.size() - 1; idx >= 0; idx--)
        {
            EVS_Bean parent = (EVS_Bean) vParent.elementAt(idx);
            String parentDB = parent.getEVS_DATABASE();
            String submitAction = parent.getCON_AC_SUBMIT_ACTION();
            boolean usable = submitAction != null && !submitAction.equals("DEL")
                && parentDB != null && !parentDB.equals("Non_EVS");
            if (!usable)
                continue;
            // cap the concept-code portion at roughly 20 characters
            if (name.length() >= 20)
                break;
            name.append(parent.getCONCEPT_IDENTIFIER()).append(":");
        }
        // append vd public id and version at the end
        if (vd.getVD_VD_ID() != null)
            name.append(vd.getVD_VD_ID());
        String version = vd.getVD_VERSION();
        if (version != null)
        {
            // normalize a whole-number version like "2" to "2.0"
            if (version.indexOf(".") < 0)
                version += ".0";
            name.append("v").append(version);
        }
        String sysName = name.toString();
        // keep only the trailing 30 characters when the name is too long
        if (sysName.length() > 30)
            sysName = sysName.substring(sysName.length() - 30);
        vd.setAC_SYS_PREF_NAME(sysName); // store it in the vd bean
        // determine the naming convention chosen on the page (or fall back to the bean)
        String selNameType = (String) m_classReq.getParameter("rNameConv");
        if (selNameType == null)
        {
            selNameType = vd.getVD_TYPE_NAME();
        }
        else if (selNameType.equals("USER"))
        {
            // keep the user-typed preferred name for later use
            String typedName = (String) m_classReq.getParameter("txPreferredName");
            if (typedName != null)
                vd.setAC_USER_PREF_NAME(typedName);
        }
        // the system name becomes the preferred name when SYS was selected
        if ("SYS".equals(selNameType))
            vd.setVD_PREFERRED_NAME(sysName);
    }
    catch (Exception e)
    {
        this.logger.error("ERROR - getSystemName : " + e.toString(), e);
    }
    return vd;
}
/**
* marks the parent and/or its pvs as deleted from the session.
*
* @param sPVAction
* @param vdPage
* @throws java.lang.Exception
*/
private void doRemoveParent(String sPVAction, String vdPage) throws Exception
{
// Marks the parent concept selected on the page as deleted (and optionally
// its PVs), rebuilds the system name, then forwards to the edit/create page.
HttpSession session = m_classReq.getSession();
VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD"); // new VD_Bean();
Vector<EVS_Bean> vParentCon = m_VD.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
if (vParentCon == null)
vParentCon = new Vector<EVS_Bean>();
// get the selected parent info from the request
String sParentCC = (String) m_classReq.getParameter("selectedParentConceptCode");
String sParentName = (String) m_classReq.getParameter("selectedParentConceptName");
String sParentDB = (String) m_classReq.getParameter("selectedParentConceptDB");
// for non evs parent compare the long names instead
if (sParentName != null && !sParentName.equals("") && sParentDB != null && sParentDB.equals("Non_EVS"))
sParentCC = sParentName;
if (sParentCC != null)
{
for (int i = 0; i < vParentCon.size(); i++)
{
EVS_Bean eBean = (EVS_Bean) vParentCon.elementAt(i);
if (eBean == null)
eBean = new EVS_Bean();
String thisParent = eBean.getCONCEPT_IDENTIFIER();
if (thisParent == null)
thisParent = "";
String thisParentName = eBean.getLONG_NAME();
if (thisParentName == null)
thisParentName = "";
String thisParentDB = eBean.getEVS_DATABASE();
if (thisParentDB == null)
thisParentDB = "";
// for non evs parent compare the long names instead
if (sParentDB != null && sParentDB.equals("Non_EVS"))
thisParent = thisParentName;
// look for the matched parent from the vector to remove
if (sParentCC.equals(thisParent))
{
// refresh the EVS tree display for this parent (return value intentionally unused)
@SuppressWarnings("unused") String strHTML = "";
EVSMasterTree tree = new EVSMasterTree(m_classReq, thisParentDB, this);
strHTML = tree.refreshTree(thisParentName, "false");
strHTML = tree.refreshTree("parentTree" + thisParentName, "false");
if (sPVAction.equals("removePVandParent"))
{
Vector<PV_Bean> vVDPVList = m_VD.getVD_PV_List(); // (Vector)session.getAttribute("VDPVList");
if (vVDPVList == null)
vVDPVList = new Vector<PV_Bean>();
// loop through the vector of pvs to get matched parent
for (int j = 0; j < vVDPVList.size(); j++)
{
PV_Bean pvBean = (PV_Bean) vVDPVList.elementAt(j);
if (pvBean == null)
pvBean = new PV_Bean();
EVS_Bean pvParent = (EVS_Bean) pvBean.getPARENT_CONCEPT();
if (pvParent == null)
pvParent = new EVS_Bean();
String pvParCon = pvParent.getCONCEPT_IDENTIFIER();
// match the parent concept with the pv's parent concept
if (thisParent.equals(pvParCon))
{
pvBean.setVP_SUBMIT_ACTION("DEL"); // mark the vp as deleted
// String pvID = pvBean.getPV_PV_IDSEQ();
vVDPVList.setElementAt(pvBean, j);
}
}
m_VD.setVD_PV_List(vVDPVList);
// DataManager.setAttribute(session, "VDPVList", vVDPVList);
}
// mark the parent as deleted and leave
eBean.setCON_AC_SUBMIT_ACTION("DEL");
vParentCon.setElementAt(eBean, i);
break;
}
}
}
// DataManager.setAttribute(session, "VDParentConcept", vParentCon);
m_VD.setReferenceConceptList(vParentCon);
// make sure all other changes are stored back in vd
m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
// rebuild vd's system preferred name from the remaining parents
m_VD = (VD_Bean) this.getSystemName(m_VD, vParentCon);
DataManager.setAttribute(session, "m_VD", m_VD);
// clear the selected-parent attributes in the session
DataManager.setAttribute(session, "SelectedParentName", "");
DataManager.setAttribute(session, "SelectedParentCC", "");
DataManager.setAttribute(session, "SelectedParentDB", "");
DataManager.setAttribute(session, "SelectedParentMetaSource", "");
// forward the page according to vdPage
if (vdPage.equals("editVD"))
ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
else
ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
}
/**
* splits the vd rep term from cadsr into individual concepts
*
* @param sComp
* name of the searched component
* @param m_Bean
* selected EVS bean
* @param nameAction
* string naming action
*
*/
private void splitIntoConceptsVD(String sComp, EVS_Bean m_Bean, String nameAction)
{
    // Splits a caDSR rep term (identified by its CONDR) into its individual
    // concepts and adds each to the VD: the first concept is the primary rep
    // term, the remainder are qualifiers.
    try
    {
        HttpSession session = m_classReq.getSession();
        VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
        if (m_VD == null)
            m_VD = new VD_Bean();
        // capture any pending page edits before modifying the bean
        m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
        String sCondr = m_Bean.getCONDR_IDSEQ();
        String sLongName = m_Bean.getLONG_NAME();
        String sIDSEQ = m_Bean.getIDSEQ();
        if (sComp.equals("RepTerm") || sComp.equals("RepQualifier"))
        {
            m_VD.setVD_REP_TERM(sLongName);
            m_VD.setVD_REP_IDSEQ(sIDSEQ);
        }
        // no CONDR means there are no component concepts to split out
        if (sCondr != null && !sCondr.equals(""))
        {
            GetACService getAC = new GetACService(m_classReq, m_classRes, this);
            Vector vCon = getAC.getAC_Concepts(sCondr, null, true);
            if (vCon != null && vCon.size() > 0)
            {
                for (int j = 0; j < vCon.size(); j++)
                {
                    // removed the throwaway `new EVS_Bean()` the original
                    // allocated before immediately overwriting it
                    EVS_Bean bean = (EVS_Bean) vCon.elementAt(j);
                    if (bean != null)
                    {
                        // first concept is the primary, the rest are qualifiers
                        String repType = (j == 0) ? "Primary" : "Qualifier";
                        m_VD = this.addRepConcepts(nameAction, m_VD, bean, repType);
                    }
                }
            }
        }
    }
    catch (Exception e)
    {
        this.logger.error("ERROR - splitintoConceptVD : " + e.toString(), e);
    }
}
/**
* this method is used to create preferred name for VD names of all three types will be stored in the bean for later
* use if type is changed, it populates name according to type selected.
*
* @param newBean
* new EVS bean to append the name to
* @param nameAct
* string new name or append name
* @param pageVD
* current vd bean
* @return VD bean
*/
public AC_Bean getACNames(EVS_Bean newBean, String nameAct, AC_Bean pageAC)
{
// Builds the VD's long name, abbreviated name, and preferred definition from
// the object class, property, and rep-term concepts. When newBean is null the
// names are rebuilt from scratch; otherwise newBean is appended to the
// existing names.
HttpSession session = m_classReq.getSession();
VD_Bean pageVD = (VD_Bean)pageAC;
if (pageVD == null)
pageVD = (VD_Bean) session.getAttribute("m_VD");
// get vd object class and property names
String sLongName = "";
String sPrefName = "";
String sAbbName = "";
String sDef = "";
// get the existing one if not restructuring the name but appending it
if (newBean != null)
{
sLongName = pageVD.getVD_LONG_NAME();
if (sLongName == null)
sLongName = "";
sDef = pageVD.getVD_PREFERRED_DEFINITION();
if (sDef == null)
sDef = "";
}
// get the typed text on to user name
String selNameType = "";
if (nameAct.equals("Search") || nameAct.equals("Remove"))
{
selNameType = (String) m_classReq.getParameter("rNameConv");
sPrefName = (String) m_classReq.getParameter("txPreferredName");
if (selNameType != null && selNameType.equals("USER") && sPrefName != null)
pageVD.setAC_USER_PREF_NAME(sPrefName);
}
// get the object class into the long name and abbr name
String sObjClass = pageVD.getVD_OBJ_CLASS();
if (sObjClass == null)
sObjClass = "";
if (!sObjClass.equals(""))
{
// rebuild long name and definition only when not appending
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " "; // add extra space if not empty
sLongName += sObjClass;
EVS_Bean mOC = (EVS_Bean) session.getAttribute("m_OC");
if (mOC != null)
{
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += mOC.getPREFERRED_DEFINITION();
}
}
if (!sAbbName.equals(""))
sAbbName += "_"; // add underscore if not empty
if (sObjClass.length() > 3)
sAbbName += sObjClass.substring(0, 4); // truncate to 4 letters
else
sAbbName = sObjClass;
}
// get the property into the long name and abbr name
String sPropClass = pageVD.getVD_PROP_CLASS();
if (sPropClass == null)
sPropClass = "";
if (!sPropClass.equals(""))
{
// rebuild long name and definition only when not appending
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " "; // add extra space if not empty
sLongName += sPropClass;
EVS_Bean mPC = (EVS_Bean) session.getAttribute("m_PC");
if (mPC != null)
{
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += mPC.getPREFERRED_DEFINITION();
}
}
if (!sAbbName.equals(""))
sAbbName += "_"; // add underscore if not empty
if (sPropClass.length() > 3)
sAbbName += sPropClass.substring(0, 4); // truncate to 4 letters
else
sAbbName += sPropClass;
}
// rep-term concepts: slot 0 is the primary, slots 1..n are qualifiers
Vector vRep = (Vector) session.getAttribute("vRepTerm");
if (vRep == null)
vRep = new Vector();
// add the qualifiers first
for (int i = 1; vRep.size() > i; i++)
{
EVS_Bean eCon = (EVS_Bean) vRep.elementAt(i);
if (eCon == null)
eCon = new EVS_Bean();
String conName = eCon.getLONG_NAME();
if (conName == null)
conName = "";
if (!conName.equals(""))
{
// rearrange it long name and definition
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " ";
sLongName += conName;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += eCon.getPREFERRED_DEFINITION();
}
if (!sAbbName.equals(""))
sAbbName += "_";
if (conName.length() > 3)
sAbbName += conName.substring(0, 4); // truncate to four letters
else
sAbbName += conName;
}
}
// add the primary
if (vRep != null && vRep.size() > 0)
{
EVS_Bean eCon = (EVS_Bean) vRep.elementAt(0);
if (eCon == null)
eCon = new EVS_Bean();
String sPrimary = eCon.getLONG_NAME();
if (sPrimary == null)
sPrimary = "";
if (!sPrimary.equals(""))
{
// rearrange it only long name and definition
if (newBean == null)
{
if (!sLongName.equals(""))
sLongName += " ";
sLongName += sPrimary;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += eCon.getPREFERRED_DEFINITION();
}
if (!sAbbName.equals(""))
sAbbName += "_";
if (sPrimary.length() > 3)
sAbbName += sPrimary.substring(0, 4); // truncate to four letters
else
sAbbName += sPrimary;
}
}
// truncate abbreviated name to 30 characters
if (sAbbName != null && sAbbName.length() > 30)
sAbbName = sAbbName.substring(0, 30);
// add the abbr name to the vd bean
pageVD.setAC_ABBR_PREF_NAME(sAbbName);
// the abbr name becomes the preferred name if ABBR was selected
if (selNameType != null && selNameType.equals("ABBR"))
pageVD.setVD_PREFERRED_NAME(sAbbName);
if (newBean != null) // appending to the existing;
{
String sSelectName = newBean.getLONG_NAME();
if (!sLongName.equals(""))
sLongName += " ";
sLongName += sSelectName;
if (!sDef.equals(""))
sDef += "_"; // add definition
sDef += newBean.getPREFERRED_DEFINITION();
}
// store the long name and definition in the vd bean only when searching
if (nameAct.equals("Search"))
{
pageVD.setVD_LONG_NAME(sLongName);
pageVD.setVD_PREFERRED_DEFINITION(sDef);
pageVD.setVDNAME_CHANGED(true);
}
return pageVD;
}
/**
*
* @param nameAction
* string naming action
*
*/
private void doVDUseSelection(String nameAction)
{
// Applies the user's "use selection" action from a rep-term/concept search
// result page: resolves the selected row into an EVS bean, splits or adds
// rep-term concepts, and rebuilds the VD names per nameAction.
try
{
HttpSession session = m_classReq.getSession();
String sSelRow = "";
boolean selectedRepQualifiers = false;
// InsACService insAC = new InsACService(req, res, this);
VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
if (m_VD == null)
m_VD = new VD_Bean();
m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
Vector<EVS_Bean> vRepTerm = (Vector) session.getAttribute("vRepTerm");
if (vRepTerm == null)
vRepTerm = new Vector<EVS_Bean>();
// more than one rep-term concept means qualifiers were already selected
if (vRepTerm.size()>1){
selectedRepQualifiers = true;
}
Vector vAC = new Vector();
EVS_Bean m_REP = new EVS_Bean();
String sComp = (String) m_classReq.getParameter("sCompBlocks");
// get rep term components
if (sComp.equals("RepTerm") || sComp.equals("RepQualifier"))
{
sSelRow = (String) m_classReq.getParameter("selRepRow");
// vAC = (Vector)session.getAttribute("vRepResult");
vAC = (Vector) session.getAttribute("vACSearch");
if (vAC == null)
vAC = new Vector();
if (sSelRow != null && !sSelRow.equals(""))
{
// row id is prefixed with two characters; the rest is the index
String sObjRow = sSelRow.substring(2);
Integer intObjRow = new Integer(sObjRow);
int intObjRow2 = intObjRow.intValue();
if (vAC.size() > intObjRow2 - 1)
m_REP = (EVS_Bean) vAC.elementAt(intObjRow2);
// get name value pair
String sNVP = (String) m_classReq.getParameter("nvpConcept");
if (sNVP != null && !sNVP.equals(""))
{
m_REP.setNVP_CONCEPT_VALUE(sNVP);
String sName = m_REP.getLONG_NAME();
m_REP.setLONG_NAME(sName + "::" + sNVP);
m_REP.setPREFERRED_DEFINITION(m_REP.getPREFERRED_DEFINITION() + "::" + sNVP);
}
//System.out.println(sNVP + sComp + m_REP.getLONG_NAME());
}
else
{
storeStatusMsg("Unable to get the selected row from the Rep Term search results.");
return;
}
// send it back if unable to obtain the concept
if (m_REP == null || m_REP.getLONG_NAME() == null)
{
storeStatusMsg("Unable to obtain concept from the selected row of the " + sComp
+ " search results.\\n" + "Please try again.");
return;
}
// handle the primary search
if (sComp.equals("RepTerm"))
{
if (m_REP.getEVS_DATABASE().equals("caDSR"))
{
// split it if rep term, add concept class to the list if evs id exists
if (m_REP.getCONDR_IDSEQ() == null || m_REP.getCONDR_IDSEQ().equals(""))
{
if (m_REP.getCONCEPT_IDENTIFIER() == null || m_REP.getCONCEPT_IDENTIFIER().equals(""))
{
storeStatusMsg("This Rep Term is not associated to a concept, so the data is suspect. \\n"
+ "Please choose another Rep Term.");
}
else
m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Primary");
}
else
splitIntoConceptsVD(sComp, m_REP, nameAction);
}
else
m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Primary");
}
else if (sComp.equals("RepQualifier"))
{
// Do this to reserve zero position in vector for primary concept
if (vRepTerm.size() < 1)
{
EVS_Bean OCBean = new EVS_Bean();
vRepTerm.addElement(OCBean);
DataManager.setAttribute(session, "vRepTerm", vRepTerm);
}
m_VD.setVD_REP_IDSEQ("");
m_VD = this.addRepConcepts(nameAction, m_VD, m_REP, "Qualifier");
}
}
else
{
// a non rep-term component was selected from the EVS search results
EVS_Bean eBean = this.getEVSSelRow(m_classReq);
if (eBean != null && eBean.getLONG_NAME() != null)
{
/* if (sComp.equals("VDObjectClass"))
{
m_VD.setVD_OBJ_CLASS(eBean.getLONG_NAME());
DataManager.setAttribute(session, "m_OC", eBean);
}
else if (sComp.equals("VDPropertyClass"))
{
m_VD.setVD_PROP_CLASS(eBean.getLONG_NAME());
DataManager.setAttribute(session, "m_PC", eBean);
}
*/ if (nameAction.equals("appendName"))
m_VD = (VD_Bean) this.getACNames(eBean, "Search", m_VD);
}
}
// re-read the rep-term list (addRepConcepts may have replaced it)
vRepTerm = (Vector) session.getAttribute("vRepTerm");
if (vRepTerm != null && vRepTerm.size() > 0){
vRepTerm = this.getMatchingThesarusconcept(vRepTerm, "Representation Term");
m_VD = this.updateRepAttribues(vRepTerm, m_VD);
}
if (m_REP.getcaDSR_COMPONENT()!= null && m_REP.getcaDSR_COMPONENT().equals("Concept Class")){
m_VD.setVD_REP_IDSEQ("");
}else{//Rep Term or from vocabulary
if(m_REP.getcaDSR_COMPONENT()!= null && !selectedRepQualifiers){//if selected existing rep term
// reuse the existing rep term: record its CONDR/IDSEQ for submit
ValidationStatusBean statusBean = new ValidationStatusBean();
statusBean.setStatusMessage("** Using existing "+m_REP.getcaDSR_COMPONENT()+" "+m_REP.getLONG_NAME()+" ("+m_REP.getID()+"v"+m_REP.getVERSION()+") from "+m_REP.getCONTEXT_NAME());
statusBean.setCondrExists(true);
statusBean.setCondrIDSEQ(m_REP.getCONDR_IDSEQ());
statusBean.setEvsBeanExists(true);
statusBean.setEvsBeanIDSEQ(m_REP.getIDSEQ());
session.setAttribute("vdStatusBean", statusBean);
}else{
m_VD.setVD_REP_IDSEQ("");
}
}
DataManager.setAttribute(session, "vRepTerm", vRepTerm);
// rebuild new name if not appending
EVS_Bean nullEVS = null;
if (nameAction.equals("newName"))
m_VD = (VD_Bean) this.getACNames(nullEVS, "Search", m_VD);
else if (nameAction.equals("blockName"))
m_VD = (VD_Bean) this.getACNames(nullEVS, "blockName", m_VD);
DataManager.setAttribute(session, "m_VD", m_VD);
}
catch (Exception e)
{
this.logger.error("ERROR - doVDUseSelection : " + e.toString(), e);
}
} // end of doVDUseSelection
/**
* adds the selected concept to the vector of concepts for property
*
* @param nameAction
* String naming action
* @param vdBean
* the VD_Bean being updated
* @param eBean
* selected EVS_Bean
* @param repType
* String property type (primary or qualifier)
* @return VD_Bean
* @throws Exception
*/
@SuppressWarnings("unchecked")
private VD_Bean addRepConcepts(String nameAction, VD_Bean vdBean,
    EVS_Bean eBean, String repType) throws Exception
{
    // Adds the selected concept to the session's rep-term concept vector.
    // Slot 0 holds the primary concept; qualifiers are appended after it.
    HttpSession session = m_classReq.getSession();
    Vector<EVS_Bean> repConcepts = (Vector) session.getAttribute("vRepTerm");
    if (repConcepts == null)
        repConcepts = new Vector<EVS_Bean>();
    // the evs user bean supplies vocabulary configuration lookups
    EVS_UserBean evsUser = (EVS_UserBean) this.sessionData.EvsUsrBean;
    if (evsUser == null)
        evsUser = new EVS_UserBean();
    // mark the concept for insert within the VD's context
    eBean.setCON_AC_SUBMIT_ACTION("INS");
    eBean.setCONTE_IDSEQ(vdBean.getVD_CONTE_IDSEQ());
    // a caDSR-sourced concept gets its database resolved from the vocab config
    String database = eBean.getEVS_DATABASE();
    if (database != null && eBean.getEVS_ORIGIN() != null && database.equalsIgnoreCase("caDSR"))
    {
        database = eBean.getVocabAttr(evsUser, eBean.getEVS_ORIGIN(), EVSSearch.VOCAB_NAME, EVSSearch.VOCAB_DBORIGIN);
        if (database.equals(EVSSearch.META_VALUE))
            database = eBean.getEVS_ORIGIN();
        eBean.setEVS_DATABASE(database);
    }
    // a primary concept replaces slot 0 when one already exists; everything
    // else (qualifiers, or the first primary) is appended
    if (repType.equals("Primary") && repConcepts.size() > 0)
        repConcepts.setElementAt(eBean, 0);
    else
        repConcepts.addElement(eBean);
    DataManager.setAttribute(session, "vRepTerm", repConcepts);
    DataManager.setAttribute(session, "newRepTerm", "true");
    // extend the VD names with this concept when appending
    if (nameAction.equals("appendName"))
        vdBean = (VD_Bean) this.getACNames(eBean, "Search", vdBean);
    return vdBean;
}
/**
* The doValidateVD method gets the values from page the user filled out, validates the input, then forwards results
* to the Validate page Called from 'doCreateVDActions', 'doSubmitVD' method. Calls 'setAC.setVDValueFromPage' to
* set the data from the page to the bean. Calls 'setAC.setValidatePageValuesVD' to validate the data. Stores 'm_VD'
* bean in session. Forwards the page 'ValidateVDPage.jsp' with validation vector to display.
*
* @throws Exception
*/
private void doValidateVDBlockEdit() throws Exception
{
    HttpSession httpSession = m_classReq.getSession();
    VD_Bean pageVD = (VD_Bean) httpSession.getAttribute("m_VD");
    // remember that the page is now in its validation phase
    DataManager.setAttribute(httpSession, "VDPageAction", "validate");
    // pull the user's edits from the request into the session bean
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, pageVD);
    DataManager.setAttribute(httpSession, "m_VD", pageVD);
    // run block-edit validation for the value-domain component
    m_setAC.setValidateBlockEdit(m_classReq, m_classRes, "ValueDomain");
    DataManager.setAttribute(httpSession, "VDEditAction", "VDBlockEdit");
    ForwardJSP(m_classReq, m_classRes, "/ValidateVDPage.jsp");
} // end of doValidateVDBlockEdit
/**
* The doInsertVD method to insert or update record in the database. Called from 'service' method where reqType is
* 'validateVDFromForm'. Retrieves the session bean m_VD. if the action is reEditVD forwards the page back to Edit
* or create pages.
*
* Otherwise, calls 'doUpdateVDAction' for editing the vd. calls 'doInsertVDfromDEAction' for creating the vd from
* DE page. calls 'doInsertVDfromMenuAction' for creating the vd from menu .
*
* @throws Exception
*/
private void doInsertVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // make sure that status message is empty
    DataManager.setAttribute(session, Session_Data.SESSION_STATUS_MESSAGE, "");
    Vector vStat = new Vector();
    DataManager.setAttribute(session, "vStatMsg", vStat);
    String sVDAction = (String) session.getAttribute("VDAction");
    if (sVDAction == null)
        sVDAction = "";
    String sVDEditAction = (String) session.getAttribute("VDEditAction");
    if (sVDEditAction == null)
        sVDEditAction = "";
    String sAction = (String) m_classReq.getParameter("ValidateVDPageAction");
    String sOriginAction = (String) session.getAttribute("originAction");
    // FIX: guard against a missing session attribute — sOriginAction was
    // dereferenced below without a null check, unlike sVDAction/sVDEditAction
    if (sOriginAction == null)
        sOriginAction = "";
    if (sAction == null)
        sAction = "submitting"; // for direct submit without validating
    // return to the create/edit pages from the validation page
    if (sAction.equals("reEditVD"))
    {
        String vdfocus = (String) session.getAttribute("TabFocus");
        if (vdfocus != null && vdfocus.equals("PV"))
            ForwardJSP(m_classReq, m_classRes, "/PermissibleValue.jsp");
        else
        {
            if (sVDAction.equals("EditVD") || sVDAction.equals("BlockEdit"))
                ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
            else
                ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        }
    }
    else
    {
        // dispatch to the matching insert/update handler
        if (sVDAction.equals("NewVD") && sOriginAction.equals("NewVDFromMenu"))
            doInsertVDfromMenuAction();
        // edit the existing vd
        else if (sVDAction.equals("EditVD") && !sOriginAction.equals("BlockEditVD"))
            doUpdateVDAction();
        else if (sVDEditAction.equals("VDBlockEdit"))
            doUpdateVDActionBE();
        // if create new vd from create/edit DE page
        else if (sOriginAction.equals("CreateNewVDfromCreateDE")
            || sOriginAction.equals("CreateNewVDfromEditDE"))
            doInsertVDfromDEAction(sOriginAction);
        // from the menu AND template/version
        else
            doInsertVDfromMenuAction();
    }
} // end of doInsertVD
/**
* update record in the database and display the result. Called from 'doInsertVD' method when the action is editing.
* Retrieves the session bean m_VD. calls 'insAC.setVD' to update the database. updates the DEbean and sends back to
* EditDE page if origin is form DEpage otherwise calls 'serAC.refreshData' to get the refreshed search result
* forwards the page back to search page with refreshed list after updating.
*
* If ret is not null stores the statusMessage as error message in session and forwards the page back to
* 'EditVDPage.jsp' for Edit.
*
* @throws Exception
*/
private void doUpdateVDAction() throws Exception
{
HttpSession session = m_classReq.getSession();
VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
VD_Bean oldVDBean = (VD_Bean) session.getAttribute("oldVDBean");
// String sMenu = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
InsACService insAC = new InsACService(m_classReq, m_classRes, this);
doInsertVDBlocks(null);
// update the status message with the VD name and public ID
storeStatusMsg("Value Domain Name : " + VDBean.getVD_LONG_NAME());
storeStatusMsg("Public ID : " + VDBean.getVD_VD_ID());
// call stored procedure to update attributes
String ret = insAC.setVD("UPD", VDBean, "Edit", oldVDBean);
// forward to search page with refreshed list after successful update
if ((ret == null) || ret.equals(""))
{
this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
// NOTE(review): no null guard here — an absent "originAction" attribute
// would NPE on the equals() call below; confirm the attribute is always set
String sOriginAction = (String) session.getAttribute("originAction");
GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
// forward page back to EditDE
if (sOriginAction.equals("editVDfromDE") || sOriginAction.equals("EditDE"))
{
DE_Bean DEBean = (DE_Bean) session.getAttribute("m_DE");
if (DEBean != null)
{
// propagate the updated VD into the data element bean
DEBean.setDE_VD_IDSEQ(VDBean.getVD_VD_IDSEQ());
DEBean.setDE_VD_PREFERRED_NAME(VDBean.getVD_PREFERRED_NAME());
DEBean.setDE_VD_NAME(VDBean.getVD_LONG_NAME());
// reset the attributes
DataManager.setAttribute(session, "originAction", "");
// add the VD bean into the DE bean
DEBean.setDE_VD_Bean(VDBean);
DataManager.setAttribute(session, "m_DE", DEBean);
CurationServlet deServ = (DataElementServlet) getACServlet("DataElement");
DEBean = (DE_Bean) deServ.getACNames("new", "editVD", DEBean);
}
ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp");
}
// go to search page with refreshed list
else
{
VDBean.setVD_ALIAS_NAME(VDBean.getVD_PREFERRED_NAME());
// VDBean.setVD_TYPE_NAME("PRIMARY");
DataManager.setAttribute(session, Session_Data.SESSION_MENU_ACTION, "editVD");
String oldID = VDBean.getVD_VD_IDSEQ();
serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Edit", oldID);
ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
}
}
// goes back to edit page if error occurs
else
{
DataManager.setAttribute(session, "VDPageAction", "nothing");
ForwardJSP(m_classReq, m_classRes, "/EditVDPage.jsp");
}
}
/**
* update record in the database and display the result. Called from 'doInsertVD' method when the aciton is editing.
* Retrieves the session bean m_VD. calls 'insAC.setVD' to update the database. updates the DEbean and sends back to
* EditDE page if origin is form DEpage otherwise calls 'serAC.refreshData' to get the refreshed search result
* forwards the page back to search page with refreshed list after updating.
*
* If ret is not null stores the statusMessage as error message in session and forwards the page back to
* 'EditVDPage.jsp' for Edit.
*
* @throws Exception
*/
private void doUpdateVDActionBE() throws Exception
{
HttpSession session = m_classReq.getSession();
VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD"); // validated edited m_VD
boolean isRefreshed = false;
String ret = ":";
InsACService insAC = new InsACService(m_classReq, m_classRes, this);
GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
GetACService getAC = new GetACService(m_classReq, m_classRes, this);
// Vector vStatMsg = new Vector();
String sNewRep = (String) session.getAttribute("newRepTerm");
if (sNewRep == null)
sNewRep = "";
//System.out.println(" new rep " + sNewRep);
Vector vBERows = (Vector) session.getAttribute("vBEResult");
int vBESize = vBERows.size();
Integer vBESize2 = new Integer(vBESize);
m_classReq.setAttribute("vBESize", vBESize2);
String sRep_IDSEQ = "";
if (vBERows.size() > 0)
{
// Be sure the buffer is loaded when doing versioning.
String newVersion = VDBean.getVD_VERSION();
if (newVersion == null)
newVersion = "";
boolean newVers = (newVersion.equals("Point") || newVersion.equals("Whole"));
if (newVers)
{
@SuppressWarnings("unchecked")
Vector<AC_Bean> tvec = vBERows;
AltNamesDefsSession.loadAsNew(this, session, tvec);
}
for (int i = 0; i < (vBERows.size()); i++)
{
// String sVD_ID = ""; //out
VD_Bean VDBeanSR = new VD_Bean();
VDBeanSR = (VD_Bean) vBERows.elementAt(i);
VD_Bean oldVDBean = new VD_Bean();
oldVDBean = oldVDBean.cloneVD_Bean(VDBeanSR);
// String oldName = (String) VDBeanSR.getVD_PREFERRED_NAME();
// updates the data from the page into the sr bean
InsertEditsIntoVDBeanSR(VDBeanSR, VDBean);
// create newly selected rep term
if (i == 0 && sNewRep.equals("true"))
{
doInsertVDBlocks(VDBeanSR); // create it
sRep_IDSEQ = VDBeanSR.getVD_REP_IDSEQ(); // get rep idseq
if (sRep_IDSEQ == null)
sRep_IDSEQ = "";
VDBean.setVD_REP_IDSEQ(sRep_IDSEQ); // add page vd bean
String sRep_Condr = VDBeanSR.getVD_REP_CONDR_IDSEQ(); // get rep condr
if (sRep_Condr == null)
sRep_Condr = "";
VDBean.setVD_REP_CONDR_IDSEQ(sRep_Condr); // add to page vd bean
// VDBean.setVD_REP_QUAL("");
}
// DataManager.setAttribute(session, "m_VD", VDBeanSR);
String oldID = oldVDBean.getVD_VD_IDSEQ();
// udpate the status message with DE name and ID
storeStatusMsg("Value Domain Name : " + VDBeanSR.getVD_LONG_NAME());
storeStatusMsg("Public ID : " + VDBeanSR.getVD_VD_ID());
// insert the version
if (newVers) // block version
{
// creates new version first and updates all other attributes
String strValid = m_setAC.checkUniqueInContext("Version", "VD", null, null, VDBeanSR, getAC,
"version");
if (strValid != null && !strValid.equals(""))
ret = "unique constraint";
else
ret = insAC.setAC_VERSION(null, null, VDBeanSR, "ValueDomain");
if (ret == null || ret.equals(""))
{
// PVServlet pvser = new PVServlet(req, res, this);
// pvser.searchVersionPV(VDBean, 0, "", "");
// get the right system name for new version
String prefName = VDBeanSR.getVD_PREFERRED_NAME();
String vdID = VDBeanSR.getVD_VD_ID();
String newVer = "v" + VDBeanSR.getVD_VERSION();
String oldVer = "v" + oldVDBean.getVD_VERSION();
// replace teh version number if system generated name
if (prefName.indexOf(vdID) > 0)
{
prefName = prefName.replaceFirst(oldVer, newVer);
VDBean.setVD_PREFERRED_NAME(prefName);
}
// keep the value and value count stored
String pvValue = VDBeanSR.getVD_Permissible_Value();
Integer pvCount = VDBeanSR.getVD_Permissible_Value_Count();
ret = insAC.setVD("UPD", VDBeanSR, "Version", oldVDBean);
if (ret == null || ret.equals(""))
{
VDBeanSR.setVD_Permissible_Value(pvValue);
VDBeanSR.setVD_Permissible_Value_Count(pvCount);
serAC.refreshData(m_classReq, m_classRes, null, null, VDBeanSR, null, "Version", oldID);
isRefreshed = true;
// reset the appened attributes to remove all the checking of the row
Vector vCheck = new Vector();
DataManager.setAttribute(session, "CheckList", vCheck);
DataManager.setAttribute(session, "AppendAction", "Not Appended");
// resetEVSBeans(req, res);
}
}
// alerady exists
else if (ret.indexOf("unique constraint") >= 0)
storeStatusMsg("\\t New version " + VDBeanSR.getVD_VERSION()
+ " already exists in the data base.\\n");
// some other problem
else
storeStatusMsg("\\t " + ret + " : Unable to create new version "
+ VDBeanSR.getVD_VERSION() + ".\\n");
}
else
// block edit
{
ret = insAC.setVD("UPD", VDBeanSR, "Edit", oldVDBean);
// forward to search page with refreshed list after successful update
if ((ret == null) || ret.equals(""))
{
serAC.refreshData(m_classReq, m_classRes, null, null, VDBeanSR, null, "Edit", oldID);
isRefreshed = true;
}
}
}
AltNamesDefsSession.blockSave(this, session);
}
// to get the final result vector if not refreshed at all
if (!(isRefreshed))
{
Vector<String> vResult = new Vector<String>();
serAC.getVDResult(m_classReq, m_classRes, vResult, "");
DataManager.setAttribute(session, "results", vResult); // store the final result in the session
DataManager.setAttribute(session, "VDPageAction", "nothing");
}
ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
}
/**
 * updates bean the selected VD from the changed values of block edit. Only attributes that carry a
 * non-empty value on the page bean {@code vd} overwrite the corresponding attribute of the selected
 * bean; empty page fields leave the selected bean untouched. Reference documents are filtered so that
 * only rows belonging to this VD (matching AC idseq) are kept. When the page requested a "Point" or
 * "Whole" version, the next version number is computed from the selected bean's current version.
 *
 * @param VDBeanSR
 *            selected vd bean from search result; updated in place
 * @param vd
 *            VD_Bean of the changed values entered on the page
 *
 * @throws Exception
 */
private void InsertEditsIntoVDBeanSR(VD_Bean VDBeanSR, VD_Bean vd) throws Exception
{
    // get all attributes of VDBean; if attribute is non-empty then set that attribute of VDBeanSR.
    // (the old "!s.equals(null)" guards were always true and have been dropped)
    String sDefinition = vd.getVD_PREFERRED_DEFINITION();
    if (sDefinition != null && !sDefinition.equals(""))
        VDBeanSR.setVD_PREFERRED_DEFINITION(sDefinition);
    String sCD_ID = vd.getVD_CD_IDSEQ();
    if (sCD_ID != null && !sCD_ID.equals(""))
        VDBeanSR.setVD_CD_IDSEQ(sCD_ID);
    String sCDName = vd.getVD_CD_NAME();
    if (sCDName != null && !sCDName.equals(""))
        VDBeanSR.setVD_CD_NAME(sCDName);
    String sAslName = vd.getVD_ASL_NAME();
    if (sAslName != null && !sAslName.equals(""))
        VDBeanSR.setVD_ASL_NAME(sAslName);
    String sDtlName = vd.getVD_DATA_TYPE();
    if (sDtlName != null && !sDtlName.equals(""))
        VDBeanSR.setVD_DATA_TYPE(sDtlName);
    String sMaxLength = vd.getVD_MAX_LENGTH_NUM();
    if (sMaxLength != null && !sMaxLength.equals(""))
        VDBeanSR.setVD_MAX_LENGTH_NUM(sMaxLength);
    String sFormlName = vd.getVD_FORML_NAME(); // UOM Format
    if (sFormlName != null && !sFormlName.equals(""))
        VDBeanSR.setVD_FORML_NAME(sFormlName);
    String sUomlName = vd.getVD_UOML_NAME();
    if (sUomlName != null && !sUomlName.equals(""))
        VDBeanSR.setVD_UOML_NAME(sUomlName);
    String sLowValue = vd.getVD_LOW_VALUE_NUM();
    if (sLowValue != null && !sLowValue.equals(""))
        VDBeanSR.setVD_LOW_VALUE_NUM(sLowValue);
    String sHighValue = vd.getVD_HIGH_VALUE_NUM();
    if (sHighValue != null && !sHighValue.equals(""))
        VDBeanSR.setVD_HIGH_VALUE_NUM(sHighValue);
    String sMinLength = vd.getVD_MIN_LENGTH_NUM();
    if (sMinLength != null && !sMinLength.equals(""))
        VDBeanSR.setVD_MIN_LENGTH_NUM(sMinLength);
    String sDecimalPlace = vd.getVD_DECIMAL_PLACE();
    if (sDecimalPlace != null && !sDecimalPlace.equals(""))
        VDBeanSR.setVD_DECIMAL_PLACE(sDecimalPlace);
    String sBeginDate = vd.getVD_BEGIN_DATE();
    if (sBeginDate != null && !sBeginDate.equals(""))
        VDBeanSR.setVD_BEGIN_DATE(sBeginDate);
    String sEndDate = vd.getVD_END_DATE();
    if (sEndDate != null && !sEndDate.equals(""))
        VDBeanSR.setVD_END_DATE(sEndDate);
    String sSource = vd.getVD_SOURCE();
    if (sSource != null && !sSource.equals(""))
        VDBeanSR.setVD_SOURCE(sSource);
    String changeNote = vd.getVD_CHANGE_NOTE();
    if (changeNote != null && !changeNote.equals(""))
        VDBeanSR.setVD_CHANGE_NOTE(changeNote);
    // get cs-csi from the page into the bean for block edit
    Vector vAC_CS = vd.getAC_AC_CSI_VECTOR();
    if (vAC_CS != null)
        VDBeanSR.setAC_AC_CSI_VECTOR(vAC_CS);
    // get the Ref docs from the page into the bean for block edit, keeping only
    // the documents that belong to this VD (match on AC idseq)
    Vector<REF_DOC_Bean> vAC_REF_DOCS = vd.getAC_REF_DOCS();
    if (vAC_REF_DOCS != null)
    {
        String sThisID = VDBeanSR.getVD_VD_IDSEQ();
        Vector<REF_DOC_Bean> temp_REF_DOCS = new Vector<REF_DOC_Bean>();
        for (REF_DOC_Bean refBean : vAC_REF_DOCS)
        {
            String sRefID = refBean.getAC_IDSEQ();
            // value comparison; the old "==" reference comparison silently dropped matching rows
            if (sRefID == null ? sThisID == null : sRefID.equals(sThisID))
            {
                temp_REF_DOCS.add(refBean);
            }
        }
        VDBeanSR.setAC_REF_DOCS(temp_REF_DOCS);
    }
    String sRepTerm = vd.getVD_REP_TERM();
    if (sRepTerm != null && !sRepTerm.equals(""))
        VDBeanSR.setVD_REP_TERM(sRepTerm);
    String sRepCondr = vd.getVD_REP_CONDR_IDSEQ();
    if (sRepCondr != null && !sRepCondr.equals(""))
        VDBeanSR.setVD_REP_CONDR_IDSEQ(sRepCondr);
    String sREP_IDSEQ = vd.getVD_REP_IDSEQ();
    if (sREP_IDSEQ != null && !sREP_IDSEQ.equals(""))
        VDBeanSR.setVD_REP_IDSEQ(sREP_IDSEQ);
    // compute the next version number when the page asked for a Point or Whole version.
    // versions are of the form "<whole>.<digit>[...]"; only the first digit after the
    // decimal point participates, matching the original behavior. Unlike the original,
    // whole parts of three or more digits no longer lose their leading digits.
    String version = vd.getVD_VERSION();
    if (version != null && (version.equals("Point") || version.equals("Whole")))
    {
        String lastVersion = (String) VDBeanSR.getVD_VERSION();
        int dot = lastVersion.indexOf('.');
        String beforeDot = lastVersion.substring(0, dot);
        int whole = Integer.parseInt(beforeDot); // digits before the decimal point
        int frac = Integer.parseInt(lastVersion.substring(dot + 1, dot + 2)); // first digit after it
        String sNewVersion;
        if (version.equals("Point") && frac != 9)
            sNewVersion = beforeDot + "." + (frac + 1);
        else
            // a ".9" point version rolls over to the next whole number; "Whole" always increments it
            sNewVersion = (whole + 1) + ".0";
        VDBeanSR.setVD_VERSION(sNewVersion);
    }
}
/**
 * Creates the new VD record in the database and displays the result. Invoked by 'doInsertVD' when the
 * action is "create new VD from DE page". Reads the session bean m_VD, persists it via
 * 'insAC.setVD', and on success copies the new VD's name/idseq into the DE bean before forwarding
 * back to the originating DE page (edit or create). On failure the status message is kept in the
 * session and the request is sent back to 'CreateVDPage.jsp' for correction.
 *
 * @param sOrigin
 *            string value describing where the vd-creation action originated.
 *
 * @throws Exception
 */
private void doInsertVDfromDEAction(String sOrigin)
    throws Exception
{
    HttpSession session = m_classReq.getSession();
    VD_Bean vdBean = (VD_Bean) session.getAttribute("m_VD");
    InsACService insService = new InsACService(m_classReq, m_classRes, this);
    // persist the building-block attributes before inserting the vd itself
    doInsertVDBlocks(null);
    String ret = insService.setVD("INS", vdBean, "New", null);
    if (ret != null && !ret.equals(""))
    {
        // insert failed: flag the page for re-validation and return to the create-VD page
        DataManager.setAttribute(session, "VDPageAction", "validate");
        ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
        return;
    }
    // success: push the new vd data into the DE bean and go back to the DE page
    DE_Bean deBean = (DE_Bean) session.getAttribute("m_DE");
    deBean.setDE_VD_NAME(vdBean.getVD_LONG_NAME());
    deBean.setDE_VD_IDSEQ(vdBean.getVD_VD_IDSEQ());
    deBean.setDE_VD_Bean(vdBean);
    DataManager.setAttribute(session, "m_DE", deBean);
    CurationServlet deServ = (DataElementServlet) getACServlet("DataElement");
    deBean = (DE_Bean) deServ.getACNames("new", "newVD", deBean);
    this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
    if (sOrigin != null && sOrigin.equals("CreateNewVDfromEditDE"))
        ForwardJSP(m_classReq, m_classRes, "/EditDEPage.jsp");
    else
        ForwardJSP(m_classReq, m_classRes, "/CreateDEPage.jsp");
}
/**
 * to create rep term and qualifier value from EVS into cadsr. Retrieves the session bean m_VD when
 * no bean is passed in. When the session flags the rep term for validation ("checkValidityRep" ==
 * "Yes"), the EVS concepts are checked against the database and, as needed, a Condr and a
 * Representation Term are created or versioned; the resulting idseqs are stored on the VD bean and
 * echoed back on the request as "REP_IDSEQ". Otherwise the bean's existing rep idseq is echoed back
 * unchanged. Always clears the "newRepTerm" session flag on exit.
 *
 * @param VDBeanSR
 *            vd attribute bean to update; may be null, in which case the session bean "m_VD" is used
 *
 * @throws Exception
 */
private void doInsertVDBlocks(VD_Bean VDBeanSR) throws Exception
{
    HttpSession session = m_classReq.getSession();
    if (VDBeanSR == null)
        VDBeanSR = (VD_Bean) session.getAttribute("m_VD");
    String checkValidityRep = (String) session.getAttribute("checkValidityRep");
    if (checkValidityRep != null && checkValidityRep.equals("Yes")){
        ValidationStatusBean repStatusBean = new ValidationStatusBean();
        Vector vRepTerm = (Vector) session.getAttribute("vRepTerm");
        InsACService insAC = new InsACService(m_classReq, m_classRes, this);
        String userName = (String) session.getAttribute("Username");
        HashMap<String, String> defaultContext = (HashMap) session.getAttribute("defaultContext");
        // guard the lookup: the old code dereferenced defaultContext here, before its
        // null check below, and threw NPE when the session attribute was missing
        String conteIdseq = (defaultContext != null) ? (String) defaultContext.get("idseq") : "";
        try {
            if ((vRepTerm != null && vRepTerm.size() > 0) && (defaultContext != null && defaultContext.size() > 0)) {
                repStatusBean = insAC.evsBeanCheck(vRepTerm, defaultContext, "", "Representation Term");
            }
            // set Rep if it is null
            if ((vRepTerm != null && vRepTerm.size() > 0)) {
                if (!repStatusBean.isEvsBeanExists()) {
                    if (repStatusBean.isCondrExists()) {
                        VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                        // Create Representation Term
                        String repIdseq = insAC.createEvsBean(userName, repStatusBean.getCondrIDSEQ(), conteIdseq, "Representation Term");
                        if (repIdseq != null && !repIdseq.equals("")) {
                            VDBeanSR.setVD_REP_IDSEQ(repIdseq);
                        }
                    } else {
                        // Create Condr
                        String condrIdseq = insAC.createCondr(vRepTerm, repStatusBean.isAllConceptsExists());
                        String repIdseq = "";
                        // Create Representation Term
                        if (condrIdseq != null && !condrIdseq.equals("")) {
                            VDBeanSR.setVD_REP_CONDR_IDSEQ(condrIdseq);
                            repIdseq = insAC.createEvsBean(userName, condrIdseq, conteIdseq, "Representation Term");
                        }
                        if (repIdseq != null && !repIdseq.equals("")) {
                            VDBeanSR.setVD_REP_IDSEQ(repIdseq);
                        }
                    }
                } else {
                    if (repStatusBean.isNewVersion()) {
                        // rep term exists but needs a new version
                        if (repStatusBean.getEvsBeanIDSEQ() != null && !repStatusBean.getEvsBeanIDSEQ().equals("")) {
                            String newID = "";
                            newID = insAC.setOC_PROP_REP_VERSION(repStatusBean.getEvsBeanIDSEQ(), "RepTerm");
                            if (newID != null && !newID.equals("")) {
                                VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                                VDBeanSR.setVD_REP_IDSEQ(newID);
                            }
                        }
                    }else{
                        // rep term already exists as-is; just link it
                        VDBeanSR.setVD_REP_CONDR_IDSEQ(repStatusBean.getCondrIDSEQ());
                        VDBeanSR.setVD_REP_IDSEQ(repStatusBean.getEvsBeanIDSEQ());
                    }
                }
            }
            m_classReq.setAttribute("REP_IDSEQ", repStatusBean.getEvsBeanIDSEQ());
        } catch (Exception e) {
            logger.error("ERROR in ValueDoaminServlet-doInsertVDBlocks : " + e.toString(), e);
            m_classReq.setAttribute("retcode", "Exception");
            this.storeStatusMsg("\\t Exception : Unable to update or remove Representation Term.");
        }
    }else{
        // no validation requested: echo the bean's current rep idseq back to the page
        m_classReq.setAttribute("REP_IDSEQ", VDBeanSR.getVD_REP_IDSEQ());
    }
    DataManager.setAttribute(session, "newRepTerm", "");
}
/**
 * creates new record in the database and display the result. Called from 'doInsertVD' method when the action is
 * create new VD from Menu. Retrieves the session bean m_VD. calls 'insAC.setVD' to update the database. calls
 * 'serAC.refreshData' to get the refreshed search result for template/version forwards the page back to create VD
 * page if new VD or back to search page if template or version after successful insert.
 *
 * If ret is not null stores the statusMessage as error message in session and forwards the page back to
 * 'createVDPage.jsp' for Edit.
 *
 * @throws Exception
 */
private void doInsertVDfromMenuAction() throws Exception
{
    HttpSession session = m_classReq.getSession();
    VD_Bean VDBean = (VD_Bean) session.getAttribute("m_VD");
    InsACService insAC = new InsACService(m_classReq, m_classRes, this);
    GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
    String sMenuAction = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
    VD_Bean oldVDBean = (VD_Bean) session.getAttribute("oldVDBean");
    if (oldVDBean == null)
        oldVDBean = new VD_Bean();
    String ret = "";
    boolean isUpdateSuccess = true;
    // persist the building-block attributes (rep term etc.) before inserting the vd
    doInsertVDBlocks(null);
    if (sMenuAction.equals("NewVDVersion"))
    {
        // udpate the status message with DE name and ID
        storeStatusMsg("Value Domain Name : " + VDBean.getVD_LONG_NAME());
        storeStatusMsg("Public ID : " + VDBean.getVD_VD_ID());
        // creates new version first
        ret = insAC.setAC_VERSION(null, null, VDBean, "ValueDomain");
        if (ret == null || ret.equals(""))
        {
            // get pvs related to this new VD, it was created in VD_Version
            // TODO serAC.doPVACSearch(VDBean.getVD_VD_IDSEQ(), VDBean.getVD_LONG_NAME(), "Version");
            PVServlet pvser = new PVServlet(m_classReq, m_classRes, this);
            pvser.searchVersionPV(VDBean, 1, "", "");
            // update non evs changes
            Vector<EVS_Bean> vParent = VDBean.getReferenceConceptList(); // (Vector)session.getAttribute("VDParentConcept");
            if (vParent != null && vParent.size() > 0)
                vParent = serAC.getNonEVSParent(vParent, VDBean, "versionSubmit");
            // get the right system name for new version; cannot use teh api because parent concept is not updated
            // yet
            String prefName = VDBean.getVD_PREFERRED_NAME();
            if (prefName == null || prefName.equalsIgnoreCase("(Generated by the System)"))
            {
                VDBean = (VD_Bean) this.getSystemName(VDBean, vParent);
                VDBean.setVD_PREFERRED_NAME(VDBean.getAC_SYS_PREF_NAME());
            }
            // and updates all other attributes
            ret = insAC.setVD("UPD", VDBean, "Version", oldVDBean);
            // resetEVSBeans(req, res);
            if (ret != null && !ret.equals(""))
            {
                // the attribute update failed after the version row was created:
                // add newly created row to searchresults and send it to edit page for update
                isUpdateSuccess = false;
                String oldID = oldVDBean.getVD_VD_IDSEQ();
                String newID = VDBean.getVD_VD_IDSEQ();
                String newVersion = VDBean.getVD_VERSION();
                VDBean = VDBean.cloneVD_Bean(oldVDBean);
                VDBean.setVD_VD_IDSEQ(newID);
                VDBean.setVD_VERSION(newVersion);
                VDBean.setVD_ASL_NAME("DRAFT MOD");
                // refresh the result list by inserting newly created VD
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Version", oldID);
            }
        }
        else
            storeStatusMsg("\\t " + ret + " - Unable to create new version successfully.");
    }
    else
    {
        // creates new one (plain new VD or template)
        ret = insAC.setVD("INS", VDBean, "New", oldVDBean); // create new one
    }
    if ((ret == null) || ret.equals(""))
    {
        this.clearCreateSessionAttributes(m_classReq, m_classRes); // clear some session attributes
        DataManager.setAttribute(session, "VDPageAction", "nothing");
        DataManager.setAttribute(session, "originAction", "");
        // forwards to search page with refreshed list if template or version
        if ((sMenuAction.equals("NewVDTemplate")) || (sMenuAction.equals("NewVDVersion")))
        {
            DataManager.setAttribute(session, "searchAC", "ValueDomain");
            DataManager.setAttribute(session, "originAction", "NewVDTemplate");
            VDBean.setVD_ALIAS_NAME(VDBean.getVD_PREFERRED_NAME());
            // VDBean.setVD_TYPE_NAME("PRIMARY");
            String oldID = oldVDBean.getVD_VD_IDSEQ();
            if (sMenuAction.equals("NewVDTemplate"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Template", oldID);
            else if (sMenuAction.equals("NewVDVersion"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Version", oldID);
            ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
        }
        // forward to create vd page with empty data if new one
        else
        {
            doOpenCreateNewPages();
        }
    }
    // goes back to create/edit vd page if error
    else
    {
        DataManager.setAttribute(session, "VDPageAction", "validate");
        // forward to create or edit pages
        if (isUpdateSuccess == false)
        {
            // the version row exists but its update failed:
            // insert the created NUE in the results.
            String oldID = oldVDBean.getVD_VD_IDSEQ();
            if (sMenuAction.equals("NewVDTemplate"))
                serAC.refreshData(m_classReq, m_classRes, null, null, VDBean, null, "Template", oldID);
            ForwardJSP(m_classReq, m_classRes, "/SearchResultsPage.jsp");
        }
        else
            ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
    }
}
/**
 * Opens the create-VD page. Captures the values entered so far on the createDE page into the DE
 * session bean, clears the create-related session attributes, seeds a fresh VD bean (with placeholder
 * values from the property file when coming from the Questions menu), stores it plus a clone as
 * "oldVDBean" in the session, and forwards to CreateVDPage.jsp.
 *
 * @throws Exception
 */
public void doOpenCreateVDPage() throws Exception
{
    HttpSession session = m_classReq.getSession();
    // capture the values entered on the createDE page before leaving it
    DE_Bean deBean = (DE_Bean) session.getAttribute("m_DE");
    if (deBean == null)
        deBean = new DE_Bean();
    m_setAC.setDEValueFromPage(m_classReq, m_classRes, deBean); // store VD bean
    DataManager.setAttribute(session, "m_DE", deBean);
    // clear some session attributes
    this.clearCreateSessionAttributes(m_classReq, m_classRes);
    // start from a pristine vd bean
    VD_Bean vdBean = new VD_Bean();
    vdBean.setVD_ASL_NAME("DRAFT NEW");
    vdBean.setAC_PREF_NAME_TYPE("SYS");
    // when coming from the Questions menu, preload the question values and placeholder data
    String sMenuAction = (String) session.getAttribute(Session_Data.SESSION_MENU_ACTION);
    if (sMenuAction.equals("Questions"))
    {
        GetACSearch searchService = new GetACSearch(m_classReq, m_classRes, this);
        searchService.getACQuestionValue(vdBean);
        // enumerated when the question carries CRF values, non-enumerated otherwise
        Vector vCRFval = (Vector) session.getAttribute("vQuestValue");
        if (vCRFval != null && vCRFval.size() > 0)
            vdBean.setVD_TYPE_FLAG("E");
        else
            vdBean.setVD_TYPE_FLAG("N");
        // seed placeholder attributes from the property file
        vdBean.setVD_PREFERRED_DEFINITION(NCICurationServlet.m_settings.getProperty("VDDefinition"));
        vdBean.setVD_DATA_TYPE(NCICurationServlet.m_settings.getProperty("DataType"));
        vdBean.setVD_MAX_LENGTH_NUM(NCICurationServlet.m_settings.getProperty("MaxLength"));
    }
    DataManager.setAttribute(session, "m_VD", vdBean);
    // remember a clone so later edits can be compared against / reverted to this state
    VD_Bean pristine = new VD_Bean();
    DataManager.setAttribute(session, "oldVDBean", pristine.cloneVD_Bean(vdBean));
    ForwardJSP(m_classReq, m_classRes, "/CreateVDPage.jsp");
}
/**
 * Removes the building block selected on the page (rep term, rep qualifier, VD object class or VD
 * property class) from the session state and the VD bean, then re-syncs the remaining rep-term
 * attributes from the thesaurus concepts and stores the updated bean back in the session.
 *
 * @throws Exception
 */
private void doRemoveBuildingBlocksVD() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String sSelRow = "";
    VD_Bean m_VD = (VD_Bean) session.getAttribute("m_VD");
    if (m_VD == null)
        m_VD = new VD_Bean();
    Vector<EVS_Bean> vRepTerm = (Vector) session.getAttribute("vRepTerm");
    if (vRepTerm == null)
        vRepTerm = new Vector<EVS_Bean>();
    String sComp = (String) m_classReq.getParameter("sCompBlocks");
    if (sComp == null)
        sComp = "";
    if (sComp.equals("RepTerm"))
    {
        // blank out the primary rep term (element 0); guard the empty-vector case,
        // which previously threw ArrayIndexOutOfBoundsException from setElementAt
        EVS_Bean m_REP = new EVS_Bean();
        if (vRepTerm.isEmpty())
            vRepTerm.addElement(m_REP);
        else
            vRepTerm.setElementAt(m_REP, 0);
        DataManager.setAttribute(session, "vRepTerm", vRepTerm);
        m_VD.setVD_REP_NAME_PRIMARY("");
        m_VD.setVD_REP_CONCEPT_CODE("");
        m_VD.setVD_REP_EVS_CUI_ORIGEN("");
        m_VD.setVD_REP_IDSEQ("");
        DataManager.setAttribute(session, "RemoveRepBlock", "true");
        DataManager.setAttribute(session, "newRepTerm", "true");
    }
    else if (sComp.equals("RepQualifier"))
    {
        sSelRow = (String) m_classReq.getParameter("selRepQRow");
        if (sSelRow != null && !(sSelRow.equals("")))
        {
            int intObjRow2 = Integer.parseInt(sSelRow); // selected qualifier row (zero based)
            // element 0 is the primary rep term, so qualifier i lives at vector index i + 1
            if (vRepTerm.size() > (intObjRow2 + 1))
            {
                vRepTerm.removeElementAt(intObjRow2 + 1); // add 1 so zero element not removed
                DataManager.setAttribute(session, "vRepTerm", vRepTerm);
            }
            // drop the qualifier from the parallel name/code/db vectors as well
            Vector vRepQualifierNames = m_VD.getVD_REP_QUALIFIER_NAMES();
            if (vRepQualifierNames == null)
                vRepQualifierNames = new Vector();
            if (vRepQualifierNames.size() > intObjRow2)
                vRepQualifierNames.removeElementAt(intObjRow2);
            Vector vRepQualifierCodes = m_VD.getVD_REP_QUALIFIER_CODES();
            if (vRepQualifierCodes == null)
                vRepQualifierCodes = new Vector();
            if (vRepQualifierCodes.size() > intObjRow2)
                vRepQualifierCodes.removeElementAt(intObjRow2);
            Vector vRepQualifierDB = m_VD.getVD_REP_QUALIFIER_DB();
            if (vRepQualifierDB == null)
                vRepQualifierDB = new Vector();
            if (vRepQualifierDB.size() > intObjRow2)
                vRepQualifierDB.removeElementAt(intObjRow2);
            m_VD.setVD_REP_QUALIFIER_NAMES(vRepQualifierNames);
            m_VD.setVD_REP_QUALIFIER_CODES(vRepQualifierCodes);
            m_VD.setVD_REP_QUALIFIER_DB(vRepQualifierDB);
            m_VD.setVD_REP_IDSEQ("");
            DataManager.setAttribute(session, "RemoveRepBlock", "true");
            DataManager.setAttribute(session, "newRepTerm", "true");
        }
    }
    else if (sComp.equals("VDObjectClass"))
    {
        m_VD.setVD_OBJ_CLASS("");
        DataManager.setAttribute(session, "m_OC", new EVS_Bean());
    }
    else if (sComp.equals("VDPropertyClass"))
    {
        m_VD.setVD_PROP_CLASS("");
        DataManager.setAttribute(session, "m_PC", new EVS_Bean());
    }
    if (sComp.equals("RepTerm") || sComp.equals("RepQualifier")){
        // re-sync the bean's rep attributes with whatever concepts remain
        vRepTerm = (Vector) session.getAttribute("vRepTerm");
        if (vRepTerm != null && vRepTerm.size() > 0){
            vRepTerm = this.getMatchingThesarusconcept(vRepTerm, "Representation Term");
            m_VD = this.updateRepAttribues(vRepTerm, m_VD);
        }
        DataManager.setAttribute(session, "vRepTerm", vRepTerm);
    }
    m_setAC.setVDValueFromPage(m_classReq, m_classRes, m_VD);
    DataManager.setAttribute(session, "m_VD", m_VD);
} // end of doRemoveQualifier
/**
 * Decides which page to return to after leaving the VD / PV edit pages, refreshing the search-result
 * list when the flow goes back to the search page.
 *
 * @param orgAct String value for origin where vd page was opened
 * @param menuAct String value of menu action where this use case started
 * @param actype String what action is expected
 * @param butPress String last button pressed
 * @param vdPageFrom string to check if it was PV or VD page ("create" or "edit")
 * @return String jsp to forward the page to; empty string on error or unknown origin
 */
public String goBackfromVD(String orgAct, String menuAct, String actype, String butPress, String vdPageFrom)
{
    try
    {
        HttpSession session = m_classReq.getSession();
        clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
        if (vdPageFrom.equals("create"))
        {
            clearCreateSessionAttributes(m_classReq, m_classRes);
            boolean templateOrVersion =
                menuAct.equals("NewVDTemplate") || menuAct.equals("NewVDVersion");
            if (templateOrVersion)
            {
                // back to the search results, with the selected VD refreshed in the list
                VD_Bean selectedVD = (VD_Bean) session.getAttribute(PVForm.SESSION_SELECT_VD);
                GetACSearch searchService = new GetACSearch(m_classReq, m_classRes, this);
                searchService.refreshData(m_classReq, m_classRes, null, null, selectedVD, null, "Refresh", "");
                return "/SearchResultsPage.jsp";
            }
            // otherwise return to the DE page this flow was started from
            return orgAct.equalsIgnoreCase("CreateNewVDfromEditDE")
                ? "/EditDEPage.jsp" : "/CreateDEPage.jsp";
        }
        else if (vdPageFrom.equals("edit"))
        {
            if (orgAct.equalsIgnoreCase("editVDfromDE"))
                return "/EditDEPage.jsp";
            boolean backToSearch = menuAct.equalsIgnoreCase("editVD")
                || orgAct.equalsIgnoreCase("EditVD")
                || orgAct.equalsIgnoreCase("BlockEditVD")
                || (butPress.equals("Search") && !actype.equals("DataElement"));
            if (backToSearch)
            {
                VD_Bean selectedVD = (VD_Bean) session.getAttribute(PVForm.SESSION_SELECT_VD);
                if (selectedVD == null)
                    selectedVD = new VD_Bean();
                GetACSearch searchService = new GetACSearch(m_classReq, m_classRes, this);
                searchService.refreshData(m_classReq, m_classRes, null, null, selectedVD, null, "Refresh", "");
                return "/SearchResultsPage.jsp";
            }
            return "/EditVDPage.jsp";
        }
    }
    catch (Exception e)
    {
        logger.error("ERROR - ", e);
    }
    return "";
}
/**
 * to clear the edited data from the edit and create pages. Re-reads the VD's attributes from the
 * database (using the idseq held by the "oldVDBean" session bean) and stores a clone of the fresh
 * bean in the session under the selected-VD key, discarding any unsaved page edits.
 *
 * @param orgAct String value for origin where vd page was opened
 * @param menuAct String value of menu action where this use case started
 * @return String jsp to forward the page to (always "/CreateVDPage.jsp")
 */
public String clearEditsOnPage(String orgAct, String menuAct)
{
    try
    {
        HttpSession session = m_classReq.getSession();
        VD_Bean VDBean = (VD_Bean) session.getAttribute("oldVDBean");
        // guard against a missing session bean; the old code threw NPE on getVD_VD_IDSEQ below
        if (VDBean == null)
            VDBean = new VD_Bean();
        // clear the related session attributes
        clearBuildingBlockSessionAttributes(m_classReq, m_classRes);
        String sVDID = VDBean.getVD_VD_IDSEQ();
        Vector vList = new Vector();
        // get VD's attributes from the database again
        GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
        if (sVDID != null && !sVDID.equals(""))
            serAC.doVDSearch(sVDID, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "", "", "", "", vList, "0");
        // forward editVD page with this bean
        if (vList.size() > 0)
        {
            VDBean = (VD_Bean) vList.elementAt(0);
            VDBean = serAC.getVDAttributes(VDBean, orgAct, menuAct);
        }
        else
        {
            // nothing found in the database: fall back to a pristine draft bean
            VDBean = new VD_Bean();
            VDBean.setVD_ASL_NAME("DRAFT NEW");
            VDBean.setAC_PREF_NAME_TYPE("SYS");
        }
        VD_Bean pgBean = new VD_Bean();
        DataManager.setAttribute(session, PVForm.SESSION_SELECT_VD, pgBean.cloneVD_Bean(VDBean));
    }
    catch (Exception e)
    {
        logger.error("ERROR - ", e);
    }
    return "/CreateVDPage.jsp";
}
/**
 * Opens the read-only VD view page. Looks the VD up by the idseq supplied either as the request
 * attribute "acIdseq" or, failing that, the request parameter "idseq"; loads its full attribute set
 * and stores the bean in the session under the per-id key "viewVD&lt;idseq&gt;" before setting up the
 * view-page request attributes (title, public id, version, included jsp).
 *
 * @throws Exception
 */
public void doOpenViewPage() throws Exception
{
    HttpSession session = m_classReq.getSession();
    String acID = (String) m_classReq.getAttribute("acIdseq");
    // fall back to the request parameter; the null check also fixes the NPE the old
    // code threw when the "acIdseq" attribute was absent
    if (acID == null || acID.equals(""))
        acID = m_classReq.getParameter("idseq");
    Vector<VD_Bean> vList = new Vector<VD_Bean>();
    // get DE's attributes from the database again
    GetACSearch serAC = new GetACSearch(m_classReq, m_classRes, this);
    if (acID != null && !acID.equals(""))
    {
        serAC.doVDSearch(acID, "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", 0, "", "", "", "", "",
            "", "", "", vList, "0");
    }
    if (vList.size() > 0) // get all attributes
    {
        VD_Bean VDBean = (VD_Bean) vList.elementAt(0);
        VDBean = serAC.getVDAttributes(VDBean, "openView", "viewVD");
        DataManager.setAttribute(session, "TabFocus", "VD");
        m_classReq.setAttribute("viewVDId", VDBean.getIDSEQ());
        String viewVD = "viewVD" + VDBean.getIDSEQ();
        DataManager.setAttribute(session, viewVD, VDBean);
        String title = "CDE Curation View VD " + VDBean.getVD_LONG_NAME() + " [" + VDBean.getVD_VD_ID() + "v" + VDBean.getVD_VERSION() + "]";
        m_classReq.setAttribute("title", title);
        m_classReq.setAttribute("publicID", VDBean.getVD_VD_ID());
        m_classReq.setAttribute("version", VDBean.getVD_VERSION());
        m_classReq.setAttribute("IncludeViewPage", "EditVD.jsp");
    }
}
/**
 * Switches between the VD and PV tabs of the read-only view page. Reads the VD bean stored in the
 * session under "viewVD&lt;id&gt;" (put there by doOpenViewPage — assumed present; TODO confirm),
 * refreshes the title/public-id/version request attributes and forwards to ViewPage.jsp with the
 * appropriate included jsp.
 *
 * @throws Exception
 */
public void doViewPageTab() throws Exception{
    String tab = m_classReq.getParameter("vdpvstab");
    String from = m_classReq.getParameter("from");
    String id = m_classReq.getParameter("id");
    String viewVD = "viewVD" + id;
    HttpSession session = m_classReq.getSession();
    VD_Bean VDBean = (VD_Bean) session.getAttribute(viewVD);
    m_classReq.setAttribute("viewVDId", id);
    String title = "CDE Curation View VD " + VDBean.getVD_LONG_NAME() + " [" + VDBean.getVD_VD_ID() + "v" + VDBean.getVD_VERSION() + "]";
    m_classReq.setAttribute("title", title);
    m_classReq.setAttribute("publicID", VDBean.getVD_VD_ID());
    m_classReq.setAttribute("version", VDBean.getVD_VERSION());
    DataManager.setAttribute(session, "VDAction", "");
    if (from.equals("edit")){
        m_classReq.getSession().setAttribute("displayErrorMessage", "Yes");
    }
    // choose the tab content; the forward target is the same either way
    if (tab != null && tab.equals("PV")) {
        DataManager.setAttribute(session, "TabFocus", "PV");
        m_classReq.setAttribute("IncludeViewPage", "PermissibleValue.jsp");
    }else{
        DataManager.setAttribute(session, "TabFocus", "VD");
        m_classReq.setAttribute("IncludeViewPage", "EditVD.jsp");
    }
    ForwardJSP(m_classReq, m_classRes, "/ViewPage.jsp");
}
/**
 * Copies the representation-term concepts into the VD bean: element 0 of {@code vRep} becomes the
 * primary rep term (name, concept code, EVS origin, idseq) and every further element is appended to
 * the parallel qualifier name/code/db vectors. Session attributes "m_REP", "m_REPQ" and
 * "vRepQResult" are updated along the way ("m_REPQ" ends up holding the last qualifier).
 *
 * @param vRep vector of EVS_Bean concepts; element 0 is the primary rep term — assumed non-empty,
 *             callers check size before invoking (TODO confirm)
 * @param vdBean VD bean to update in place
 * @return the updated {@code vdBean}
 */
private VD_Bean updateRepAttribues(Vector vRep, VD_Bean vdBean) {
    HttpSession session = m_classReq.getSession();
    // add rep primary attributes to the vd bean
    EVS_Bean pBean =(EVS_Bean)vRep.get(0);
    String nvpValue = "";
    // name-value-pair concepts carry their value suffixed as "::<value>"
    if (pBean.getNAME_VALUE_PAIR_IND() > 0)
        nvpValue="::"+pBean.getNVP_CONCEPT_VALUE();
    vdBean.setVD_REP_NAME_PRIMARY(pBean.getLONG_NAME()+nvpValue);
    vdBean.setVD_REP_CONCEPT_CODE(pBean.getCONCEPT_IDENTIFIER());
    vdBean.setVD_REP_EVS_CUI_ORIGEN(pBean.getEVS_DATABASE());
    vdBean.setVD_REP_IDSEQ(pBean.getIDSEQ());
    DataManager.setAttribute(session, "m_REP", pBean);
    // update qualifier vectors: reset them, then rebuild from elements 1..n
    vdBean.setVD_REP_QUALIFIER_NAMES(null);
    vdBean.setVD_REP_QUALIFIER_CODES(null);
    vdBean.setVD_REP_QUALIFIER_DB(null);
    for (int i=1; i<vRep.size();i++){
        EVS_Bean eBean =(EVS_Bean)vRep.get(i);
        nvpValue = "";
        if (eBean.getNAME_VALUE_PAIR_IND() > 0)
            nvpValue="::"+eBean.getNVP_CONCEPT_VALUE();
        // add rep qualifiers to the vector
        Vector<String> vRepQualifierNames = vdBean.getVD_REP_QUALIFIER_NAMES();
        if (vRepQualifierNames == null)
            vRepQualifierNames = new Vector<String>();
        vRepQualifierNames.addElement(eBean.getLONG_NAME()+nvpValue);
        Vector<String> vRepQualifierCodes = vdBean.getVD_REP_QUALIFIER_CODES();
        if (vRepQualifierCodes == null)
            vRepQualifierCodes = new Vector<String>();
        vRepQualifierCodes.addElement(eBean.getCONCEPT_IDENTIFIER());
        Vector<String> vRepQualifierDB = vdBean.getVD_REP_QUALIFIER_DB();
        if (vRepQualifierDB == null)
            vRepQualifierDB = new Vector<String>();
        vRepQualifierDB.addElement(eBean.getEVS_DATABASE());
        vdBean.setVD_REP_QUALIFIER_NAMES(vRepQualifierNames);
        vdBean.setVD_REP_QUALIFIER_CODES(vRepQualifierCodes);
        vdBean.setVD_REP_QUALIFIER_DB(vRepQualifierDB);
        // if(vRepQualifierNames.size()>0)
        // vdBean.setVD_REP_QUAL((String)vRepQualifierNames.elementAt(0));
        DataManager.setAttribute(session, "vRepQResult", null);
        DataManager.setAttribute(session, "m_REPQ", eBean);
    }
    return vdBean;
}
}
|
Fix for NVP
SVN-Revision: 1502
|
src/gov/nih/nci/cadsr/cdecurate/tool/ValueDomainServlet.java
|
Fix for NVP
|
|
Java
|
bsd-3-clause
|
d6eada9aba2bc531b0f24c5dcb6596845f6c2fbb
| 0
|
skadistats/clarity-examples,kwahsog/clarity-examples
|
package skadistats.clarity.examples.matchend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import skadistats.clarity.model.Entity;
import skadistats.clarity.processor.entities.Entities;
import skadistats.clarity.processor.entities.UsesEntities;
import skadistats.clarity.processor.runner.Context;
import skadistats.clarity.processor.runner.Runner;
import java.io.FileInputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
@UsesEntities
public class Main {

    // FIX: was LoggerFactory.getLogger(Main.class.getPackage().getClass()), which
    // evaluates to Class<java.lang.Package> and silently names the logger
    // "java.lang.Package" instead of this class.
    private final Logger log = LoggerFactory.getLogger(Main.class);

    /**
     * Parses the replay file given as args[0] and prints an end-of-match
     * scoreboard, logging total processing time.
     */
    public void run(String[] args) throws Exception {
        long tStart = System.currentTimeMillis();
        // FIX: close the replay stream when the run is finished (was leaked).
        Context ctx;
        try (FileInputStream in = new FileInputStream(args[0])) {
            ctx = new Runner().runWith(in, this);
        }
        summary(ctx);
        long tMatch = System.currentTimeMillis() - tStart;
        log.info("total time taken: {}s", (tMatch) / 1000.0);
    }

    /**
     * Prints one row per player (10 players) for the columns defined below,
     * padding each column to the widest value seen.
     */
    private void summary(Context ctx) throws UnsupportedEncodingException {
        // Column definition: header text, entity property prefix, collected values.
        class ColDef {
            String columnName;
            String propertyName;
            List<String> values;
            int width;
            public ColDef(String columnName, String propertyName) {
                this.columnName = columnName;
                this.propertyName = propertyName;
                this.width = columnName.length();
            }
        }
        ColDef[] columns = new ColDef[] {
            new ColDef("Name", "m_iszPlayerNames"),
            new ColDef("Level", "m_iLevel"),
            new ColDef("K", "m_iKills"),
            new ColDef("D", "m_iDeaths"),
            new ColDef("A", "m_iAssists"),
            new ColDef("Gold", "EndScoreAndSpectatorStats.m_iTotalEarnedGold"),
            new ColDef("LH", "m_iLastHitCount"),
            new ColDef("DN", "m_iDenyCount"),
        };
        Entity ps = ctx.getProcessor(Entities.class).getByDtName("DT_DOTA_PlayerResource");
        for (ColDef c : columns) {
            c.values = new ArrayList<>();
            // Array-backed properties are addressed as "<name>.0000" .. "<name>.0009".
            int baseIndex = ps.getDtClass().getPropertyIndex(c.propertyName + ".0000");
            for (int p = 0; p < 10; p++) {
                String v = new String(ps.getState()[baseIndex + p].toString().getBytes("ISO-8859-1"));
                c.values.add(v);
                c.width = Math.max(c.width, v.length());
            }
        }
        StringBuffer buf = new StringBuffer();
        for (ColDef c : columns) {
            buf.append(c.columnName);
            // FIX: pad with a loop instead of slicing a fixed space literal, which
            // threw IndexOutOfBoundsException once a value was wider than the literal.
            pad(buf, c.width - c.columnName.length() + 2);
        }
        System.out.println(buf);
        for (int p = 0; p < 10; p++) {
            buf.setLength(0);
            for (ColDef c : columns) {
                buf.append(c.values.get(p));
                pad(buf, c.width - c.values.get(p).length() + 2);
            }
            System.out.println(buf);
        }
    }

    /** Appends {@code n} spaces to {@code buf}. */
    private static void pad(StringBuffer buf, int n) {
        for (int i = 0; i < n; i++) {
            buf.append(' ');
        }
    }

    /**
     * Entry point. When attached to an interactive console, waits for ENTER
     * before starting so a profiler can be attached.
     */
    public static void main(String[] args) throws Exception {
        if (System.console() != null) {
            System.console().readLine();
        }
        new Main().run(args);
    }
}
|
src/main/java/skadistats/clarity/examples/matchend/Main.java
|
package skadistats.clarity.examples.matchend;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import skadistats.clarity.model.Entity;
import skadistats.clarity.processor.entities.Entities;
import skadistats.clarity.processor.entities.UsesEntities;
import skadistats.clarity.processor.runner.Context;
import skadistats.clarity.processor.runner.Runner;
import java.io.FileInputStream;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
@UsesEntities
public class Main {

    // FIX: was LoggerFactory.getLogger(Main.class.getPackage().getClass()), which
    // evaluates to Class<java.lang.Package> and silently names the logger
    // "java.lang.Package" instead of this class.
    private final Logger log = LoggerFactory.getLogger(Main.class);

    /**
     * Parses the replay file given as args[0] and prints an end-of-match
     * scoreboard, logging total processing time.
     */
    public void run(String[] args) throws Exception {
        long tStart = System.currentTimeMillis();
        // FIX: close the replay stream when the run is finished (was leaked).
        Context ctx;
        try (FileInputStream in = new FileInputStream(args[0])) {
            ctx = new Runner().runWith(in, this);
        }
        summary(ctx);
        long tMatch = System.currentTimeMillis() - tStart;
        log.info("total time taken: {}s", (tMatch) / 1000.0);
    }

    /**
     * Prints one row per player (10 players) for the columns defined below,
     * padding each column to the widest value seen.
     */
    private void summary(Context ctx) throws UnsupportedEncodingException {
        // Column definition: header text, entity property prefix, collected values.
        class ColDef {
            String columnName;
            String propertyName;
            List<String> values;
            int width;
            public ColDef(String columnName, String propertyName) {
                this.columnName = columnName;
                this.propertyName = propertyName;
                this.width = columnName.length();
            }
        }
        ColDef[] columns = new ColDef[] {
            new ColDef("Name", "m_iszPlayerNames"),
            new ColDef("Level", "m_iLevel"),
            new ColDef("K", "m_iKills"),
            new ColDef("D", "m_iDeaths"),
            new ColDef("A", "m_iAssists"),
            new ColDef("Gold", "EndScoreAndSpectatorStats.m_iTotalEarnedGold"),
            new ColDef("LH", "m_iLastHitCount"),
            new ColDef("DN", "m_iDenyCount"),
        };
        Entity ps = ctx.getProcessor(Entities.class).getByDtName("DT_DOTA_PlayerResource");
        for (ColDef c : columns) {
            c.values = new ArrayList<>();
            // Array-backed properties are addressed as "<name>.0000" .. "<name>.0009".
            int baseIndex = ps.getDtClass().getPropertyIndex(c.propertyName + ".0000");
            for (int p = 0; p < 10; p++) {
                String v = new String(ps.getState()[baseIndex + p].toString().getBytes("ISO-8859-1"));
                c.values.add(v);
                c.width = Math.max(c.width, v.length());
            }
        }
        StringBuffer buf = new StringBuffer();
        for (ColDef c : columns) {
            buf.append(c.columnName);
            // FIX: pad with a loop instead of slicing a fixed space literal, which
            // threw IndexOutOfBoundsException once a value was wider than the literal.
            pad(buf, c.width - c.columnName.length() + 2);
        }
        System.out.println(buf);
        for (int p = 0; p < 10; p++) {
            buf.setLength(0);
            for (ColDef c : columns) {
                buf.append(c.values.get(p));
                pad(buf, c.width - c.values.get(p).length() + 2);
            }
            System.out.println(buf);
        }
    }

    /** Appends {@code n} spaces to {@code buf}. */
    private static void pad(StringBuffer buf, int n) {
        for (int i = 0; i < n; i++) {
            buf.append(' ');
        }
    }

    public static void main(String[] args) throws Exception {
        new Main().run(args);
    }
}
|
wait on startup (for profiling)
|
src/main/java/skadistats/clarity/examples/matchend/Main.java
|
wait on startup (for profiling)
|
|
Java
|
apache-2.0
|
26b8416ca8b59392030146d01e66739e6adbe22c
| 0
|
mhardalov/gate-sports-processor
|
src/test/java/org/sports/gate/ontology/OntologyTests.java
|
package org.sports.gate.ontology;
import java.util.Calendar;
import org.junit.Test;
import org.sports.ontology.OntologyHandler;
import org.sports.ontology.model.DocumentModel;
import org.sports.ontology.model.PersonQuotes;
import org.sports.ontology.model.ResultRelation;
import com.hp.hpl.jena.rdf.model.Resource;
public class OntologyTests {

    // FIX: a hard-coded developer-machine absolute path made this test fail on
    // every other machine/CI box. The path can now be overridden with
    // -Dsports.ontology.file=...; the old value remains the default.
    static String ontologyFile = System.getProperty("sports.ontology.file",
            "/home/momchil/Projects/spring-demo/gate-sports-processor/src/main/resources/gate/sports_terms/ontology/sports.owl");

    /**
     * Registers two documents with quotes and result relations and prints the
     * resulting ontology. Smoke test only — no assertions.
     */
    @Test
    public void ontologyTest() {
        PersonQuotes quotes = new PersonQuotes();
        quotes.setPerson("John Smith");
        quotes.addQuote("I'm the best.");
        quotes.addQuote("Ontology testing with some quotes by me.");
        quotes.addQuote("Third sentsence for today.");
        ResultRelation relation = new ResultRelation();
        relation.setResult("7:2");
        relation.getCompetitors().add("Levski");
        relation.getCompetitors().add("CSKA");
        DocumentModel document = new DocumentModel();
        document.setContent("Empty");
        document.setUrl("http://somewhere/JohnSmith");
        document.setDate(Calendar.getInstance().getTime());
        OntologyHandler handler = new OntologyHandler();
        Resource resource = handler.registerDocument(document);
        handler.addPersonQuote(quotes, resource);
        handler.addResultRelation(relation, resource);
        // Second document: different person, single-competitor result.
        quotes = new PersonQuotes();
        quotes.setPerson("Tom Johnes");
        quotes.addQuote("Second Quote by me.");
        quotes.addQuote("Come on do it!");
        relation = new ResultRelation();
        relation.setResult("10s");
        relation.getCompetitors().add("Bolt");
        document = new DocumentModel();
        document.setContent("Empty");
        document.setUrl("http://somewhere/TomJohnes");
        document.setDate(Calendar.getInstance().getTime());
        resource = handler.registerDocument(document);
        handler.addPersonQuote(quotes, resource);
        handler.addResultRelation(relation, resource);
        handler.print();
    }

    /**
     * Loads the ontology from {@link #ontologyFile} and runs a URL-keyed query.
     * NOTE(review): requires the ontology file to exist at the configured path.
     */
    @Test
    public void ontologyQueryTest() {
        OntologyHandler handler = new OntologyHandler();
        handler.open(ontologyFile);
        //bg.sportal.www:http/news.php?news=342208
        //http://www.sportal.bg/news.php?news=342208
        handler.query("bg.sportal.www:http/news.php?news=342208");
    }
}
|
Moved Ontology tests to ontology project
|
src/test/java/org/sports/gate/ontology/OntologyTests.java
|
Moved Ontology tests to ontology project
|
||
Java
|
mit
|
a66834cf885279b049fa72b126aac6311b1fa539
| 0
|
drcrazy/TechReborn,TechReborn/TechReborn,Dimmerworld/TechReborn
|
package techreborn.items;
import java.util.List;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import techreborn.client.TechRebornCreativeTabMisc;
public class ItemDustTiny extends ItemTR {

    /** Dust type names; the array index doubles as the item's metadata value. */
    public static final String[] types = new String[]
    { "Almandine", "AluminumBrass", "Aluminium", "Alumite", "Andradite",
      "Antimony", "Ardite", "Ashes", "Basalt", "Bauxite", "Biotite",
      "Brass", "Bronze", "Cadmium", "Calcite", "Charcoal", "Chrome",
      "Cinnabar", "Clay", "Coal", "Cobalt", "Copper", "Cupronickel",
      "DarkAshes", "DarkIron", "Diamond", "Electrum", "Emerald",
      "EnderEye", "EnderPearl", "Endstone", "Flint", "Glowstone", "Gold", "Graphite",
      "Grossular", "Gunpowder", "Indium", "Invar", "Iridium", "Iron", "Lapis", "Lazurite",
      "Lead", "Limestone", "Lodestone", "Magnesium", "Magnetite", "Manganese",
      "Manyullyn", "Marble", "Mithril", "Netherrack", "Nichrome", "Nickel",
      "Obsidian", "Osmium", "Peridot", "Phosphorous", "Platinum", "PotassiumFeldspar",
      "Pyrite", "Pyrope", "RedGarnet", "Redrock", "Redstone", "Ruby", "Saltpeter",
      "Sapphire", "Silicon", "Sodalite", "Spessartine", "Sphalerite",
      "Steel", "Sulfur", "Tellurium", "Teslatite", "Tetrahedrite", "Tin",
      "Titanium", "Tungsten", "Uvarovite", "Vinteum", "Voidstone", "YellowGarnet",
      "Zinc" };

    /** One icon per entry of {@link #types}, populated in {@link #registerIcons}. */
    private IIcon[] textures;

    public ItemDustTiny()
    {
        setUnlocalizedName("techreborn.dusttiny");
        setHasSubtypes(true);
        setCreativeTab(TechRebornCreativeTabMisc.instance);
    }

    /** Clamps a damage value to a valid index (out-of-range falls back to 0). */
    private static int clampMeta(int meta, int length)
    {
        return (meta < 0 || meta >= length) ? 0 : meta;
    }

    @Override
    // Registers one texture per dust type under tinyDust/.
    public void registerIcons(IIconRegister iconRegister)
    {
        textures = new IIcon[types.length];
        for (int index = 0; index < types.length; index++)
        {
            textures[index] = iconRegister.registerIcon("techreborn:" + "tinyDust/tiny"
                    + types[index] + "Dust");
        }
    }

    @Override
    // Returns the icon matching the item's metadata.
    public IIcon getIconFromDamage(int meta)
    {
        return textures[clampMeta(meta, textures.length)];
    }

    @Override
    // Builds the per-type unlocalized name from the item's metadata.
    public String getUnlocalizedName(ItemStack itemStack)
    {
        int damage = clampMeta(itemStack.getItemDamage(), types.length);
        return super.getUnlocalizedName() + "." + types[damage];
    }

    // Adds one sub-item per dust type to the creative tab.
    public void getSubItems(Item item, CreativeTabs creativeTabs, List list)
    {
        for (int damage = 0; damage < types.length; damage++)
        {
            list.add(new ItemStack(item, 1, damage));
        }
    }

    @Override
    public EnumRarity getRarity(ItemStack itemstack)
    {
        return EnumRarity.uncommon;
    }
}
|
src/main/java/techreborn/items/ItemDustTiny.java
|
package techreborn.items;
import java.util.List;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.EnumRarity;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import techreborn.client.TechRebornCreativeTabMisc;
public class ItemDustTiny extends ItemTR {
"Almandine", "AluminumBrass", "Aluminium", "Alumite", "Andradite",
"Antimony", "Ardite", "Ashes", "Basalt", "Bauxite", "Biotite",
"Brass", "Bronze", "Cadmium", "Calcite", "Charcoal", "Chrome",
"Cinnabar", "Clay", "Coal", "Cobalt", "Copper", "Cupronickel",
"DarkAshes", "DarkIron", "Diamond", "Electrum", "Emerald",
"EnderEye", "EnderPearl", "Endstone", "Flint", "Glowstone", "Gold", "Graphite",
"Grossular", "Gunpowder", "Indium", "Invar", "Iridium", "Iron", "Lapis", "Lazurite",
"Lead", "Limestone", "Lodestone", "Magnesium", "Magnetite", "Manganese",
"Manyullyn", "Marble", "Mithril", "Netherrack", "Nichrome", "Nickel",
"Obsidian", "Osmium", "Peridot", "Phosphorous", "Platinum", "PotassiumFeldspar",
"Pyrite", "Pyrope", "RedGarnet", "Redrock", "Redstone", "Ruby", "Saltpeter",
"Sapphire", "Silicon", "Sodalite", "Spessartine", "Sphalerite",
"Steel", "Sulfur", "Tellurium", "Teslatite", "Tetrahedrite", "Tin",
"Titanium", "Tungsten", "Uvarovite", "Vinteum", "Voidstone", "YellowGarnet",
"Zinc" };
private IIcon[] textures;
public ItemDustTiny()
{
setUnlocalizedName("techreborn.dusttiny");
setHasSubtypes(true);
setCreativeTab(TechRebornCreativeTabMisc.instance);
}
@Override
// Registers Textures For All Dusts
public void registerIcons(IIconRegister iconRegister)
{
textures = new IIcon[types.length];
for (int i = 0; i < types.length; ++i)
{
textures[i] = iconRegister.registerIcon("techreborn:" + "tinyDust/tiny"
+ types[i] + "Dust");
}
}
@Override
// Adds Texture what match's meta data
public IIcon getIconFromDamage(int meta)
{
if (meta < 0 || meta >= textures.length)
{
meta = 0;
}
return textures[meta];
}
@Override
// gets Unlocalized Name depending on meta data
public String getUnlocalizedName(ItemStack itemStack)
{
int meta = itemStack.getItemDamage();
if (meta < 0 || meta >= types.length)
{
meta = 0;
}
return super.getUnlocalizedName() + "." + types[meta];
}
// Adds Dusts SubItems To Creative Tab
public void getSubItems(Item item, CreativeTabs creativeTabs, List list)
{
for (int meta = 0; meta < types.length; ++meta)
{
list.add(new ItemStack(item, 1, meta));
}
}
@Override
public EnumRarity getRarity(ItemStack itemstack)
{
return EnumRarity.uncommon;
}
}
|
Fixed copy paste error
I should really download eclipse...
|
src/main/java/techreborn/items/ItemDustTiny.java
|
Fixed copy paste error
|
|
Java
|
mit
|
8ff8922a66d73bc3bcdfe37fc03661a027a258e6
| 0
|
Raizlabs/DBFlow,janzoner/DBFlow,mickele/DBFlow,Raizlabs/DBFlow,mickele/DBFlow
|
package com.raizlabs.android.dbflow.test.sql;
import android.database.Cursor;
import android.support.annotation.NonNull;
import com.raizlabs.android.dbflow.config.FlowManager;
import com.raizlabs.android.dbflow.sql.SQLiteType;
import com.raizlabs.android.dbflow.sql.language.Select;
import com.raizlabs.android.dbflow.sql.migration.AlterTableMigration;
import com.raizlabs.android.dbflow.sql.migration.IndexMigration;
import com.raizlabs.android.dbflow.sql.migration.UpdateTableMigration;
import com.raizlabs.android.dbflow.test.FlowTestCase;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class MigrationTest extends FlowTestCase {
@Test
public void testMigration() {
List<String> columnNames = Arrays.asList("`fraction` REAL", "`time` INTEGER", "`name2` TEXT", "`number` INTEGER", "`blobby` BLOB");
List<String> columns = Arrays.asList("fraction", "time", "name2", "number", "blobby");
AlterTableMigration<MigrationModel> renameMigration = new AlterTableMigration<>(MigrationModel.class).renameFrom("TestModel");
renameMigration.onPreMigrate();
assertEquals("ALTER TABLE `TestModel` RENAME TO `MigrationModel`", renameMigration.getRenameQuery());
renameMigration.onPostMigrate();
AlterTableMigration<MigrationModel> alterTableMigration = new AlterTableMigration<>(MigrationModel.class);
alterTableMigration.addColumn(SQLiteType.REAL, "fraction")
.addColumn(SQLiteType.INTEGER, "time")
.addColumn(SQLiteType.TEXT, "name2")
.addColumn(SQLiteType.INTEGER, "number")
.addColumn(SQLiteType.BLOB, "blobby");
alterTableMigration.onPreMigrate();
List<String> columnDefinitions = alterTableMigration.getColumnDefinitions();
for (int i = 0; i < columnDefinitions.size(); i++) {
assertEquals("ALTER TABLE `MigrationModel` ADD COLUMN " + columnNames.get(i), columnDefinitions.get(i));
}
alterTableMigration.migrate(FlowManager.getDatabaseForTable(MigrationModel.class).getWritableDatabase());
// test the column sizes
Cursor cursor = new Select().from(MigrationModel.class).where().query();
assertTrue(cursor.getColumnNames().length == columnNames.size() + 1);
try {
Thread.sleep(200);
} catch (InterruptedException e) {
}
// make sure column exists now
for (int i = 0; i < columns.size(); i++) {
assertTrue(cursor.getColumnIndex(columns.get(i)) != -1);
}
cursor.close();
alterTableMigration.onPostMigrate();
}
@Test
public void testUpdateMigration() {
UpdateTableMigration<MigrationModel> updateTableMigration
= new UpdateTableMigration<>(MigrationModel.class)
.set(MigrationModel_Table.name.is("test")).where(MigrationModel_Table.name.is("notTest"));
updateTableMigration.onPreMigrate();
assertEquals("UPDATE `MigrationModel` SET `name`='test' WHERE `name`='notTest'", updateTableMigration
.getUpdateStatement().getQuery().trim());
updateTableMigration.migrate(FlowManager.getDatabaseForTable(MigrationModel.class).getWritableDatabase());
updateTableMigration.onPostMigrate();
}
@Test
public void testSqlFile() {
/*MigrationModel migrationModel = new MigrationModel();
migrationModel.setName("test");
migrationModel.save();
Cursor cursor = new Select().from(MigrationModel.class).query();
assertTrue(cursor.moveToFirst());
int addedColumIndex = cursor.getColumnIndex("addedColumn");
assertFalse(addedColumIndex == -1);
cursor.close();*/
// broken with junit tests
}
public void testIndexMigration() {
IndexMigration<TestModel3> indexMigration
= new IndexMigration<TestModel3>(TestModel3.class) {
@NonNull
@Override
public String getName() {
return "MyIndex";
}
}
.addColumn(TestModel3_Table.type);
assertEquals("CREATE INDEX IF NOT EXISTS `MyIndex` ON `TestModel32`(`type`)", indexMigration.getIndexQuery().trim());
}
}
|
dbflow-tests/src/test/java/com/raizlabs/android/dbflow/test/sql/MigrationTest.java
|
package com.raizlabs.android.dbflow.test.sql;
import android.database.Cursor;
import android.support.annotation.NonNull;
import com.raizlabs.android.dbflow.config.FlowManager;
import com.raizlabs.android.dbflow.sql.SQLiteType;
import com.raizlabs.android.dbflow.sql.language.Select;
import com.raizlabs.android.dbflow.sql.migration.AlterTableMigration;
import com.raizlabs.android.dbflow.sql.migration.IndexMigration;
import com.raizlabs.android.dbflow.sql.migration.UpdateTableMigration;
import com.raizlabs.android.dbflow.test.FlowTestCase;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class MigrationTest extends FlowTestCase {
@Test
public void testMigration() {
List<String> columnNames = Arrays.asList("`fraction` REAL", "`time` INTEGER", "`name2` TEXT", "`number` INTEGER", "`blobby` BLOB");
List<String> columns = Arrays.asList("fraction", "time", "name2", "number", "blobby");
AlterTableMigration<MigrationModel> renameMigration = new AlterTableMigration<>(MigrationModel.class).renameFrom("TestModel");
renameMigration.onPreMigrate();
assertEquals("ALTER TABLE `TestModel` RENAME TO `MigrationModel`", renameMigration.getRenameQuery());
renameMigration.onPostMigrate();
AlterTableMigration<MigrationModel> alterTableMigration = new AlterTableMigration<>(MigrationModel.class);
alterTableMigration.addColumn(SQLiteType.REAL, "fraction")
.addColumn(SQLiteType.INTEGER, "time")
.addColumn(SQLiteType.TEXT, "name2")
.addColumn(SQLiteType.INTEGER, "number")
.addColumn(SQLiteType.BLOB, "blobby");
alterTableMigration.onPreMigrate();
List<String> columnDefinitions = alterTableMigration.getColumnDefinitions();
for (int i = 0; i < columnDefinitions.size(); i++) {
assertEquals("ALTER TABLE `MigrationModel` ADD COLUMN " + columnNames.get(i), columnDefinitions.get(i));
}
alterTableMigration.migrate(FlowManager.getDatabaseForTable(MigrationModel.class).getWritableDatabase());
// test the column sizes
Cursor cursor = new Select().from(MigrationModel.class).where().query();
assertTrue(cursor.getColumnNames().length == columnNames.size() + 1);
// make sure column exists now
for (int i = 0; i < columns.size(); i++) {
assertTrue(cursor.getColumnIndex(columns.get(i)) != -1);
}
cursor.close();
alterTableMigration.onPostMigrate();
}
@Test
public void testUpdateMigration() {
UpdateTableMigration<MigrationModel> updateTableMigration
= new UpdateTableMigration<>(MigrationModel.class)
.set(MigrationModel_Table.name.is("test")).where(MigrationModel_Table.name.is("notTest"));
updateTableMigration.onPreMigrate();
assertEquals("UPDATE `MigrationModel` SET `name`='test' WHERE `name`='notTest'", updateTableMigration
.getUpdateStatement().getQuery().trim());
updateTableMigration.migrate(FlowManager.getDatabaseForTable(MigrationModel.class).getWritableDatabase());
updateTableMigration.onPostMigrate();
}
@Test
public void testSqlFile() {
/*MigrationModel migrationModel = new MigrationModel();
migrationModel.setName("test");
migrationModel.save();
Cursor cursor = new Select().from(MigrationModel.class).query();
assertTrue(cursor.moveToFirst());
int addedColumIndex = cursor.getColumnIndex("addedColumn");
assertFalse(addedColumIndex == -1);
cursor.close();*/
// broken with junit tests
}
public void testIndexMigration() {
IndexMigration<TestModel3> indexMigration
= new IndexMigration<TestModel3>(TestModel3.class) {
@NonNull
@Override
public String getName() {
return "MyIndex";
}
}
.addColumn(TestModel3_Table.type);
assertEquals("CREATE INDEX IF NOT EXISTS `MyIndex` ON `TestModel32`(`type`)", indexMigration.getIndexQuery().trim());
}
}
|
small change
|
dbflow-tests/src/test/java/com/raizlabs/android/dbflow/test/sql/MigrationTest.java
|
small change
|
|
Java
|
mit
|
87b03c8cea856e491113b5204c02cab597150357
| 0
|
Pardot/Rhombus,ybrs/Rhombus,Pardot/Rhombus,ybrs/Rhombus
|
package com.pardot.rhombus.cli;
import com.pardot.rhombus.cobject.CKeyspaceDefinition;
import com.pardot.rhombus.util.JsonUtil;
import org.apache.commons.cli.*;
import java.io.IOException;
/**
* User: Rob Righter
* Date: 8/17/13
* Time: 11:06 AM
*/
public class RhombusCli implements RhombusCommand {

    /** Keyspace definition parsed from the -keyspacefile / -keyspaceresource option. */
    public CKeyspaceDefinition keyspaceDefinition;

    /** Options every Rhombus command understands. */
    public static Options makeBootstrapOptions(){
        Options ret = new Options();
        Option help = new Option( "help", "print this message" );
        ret.addOption(help);
        return ret;
    }

    /** Bootstrap options plus the two mutually-alternative keyspace sources. */
    public Options getCommandOptions(){
        Options ret = makeBootstrapOptions();
        Option keyspaceFile = OptionBuilder.withArgName( "filename" )
                .hasArg()
                .withDescription("Filename of json keyspace definition")
                .create( "keyspacefile" );
        // FIX: this option's help text was a copy-paste of the file option's;
        // it actually names a classpath resource, not a file on disk.
        Option keyspaceResource = OptionBuilder.withArgName( "filename" )
                .hasArg()
                .withDescription("Classpath resource name of json keyspace definition")
                .create( "keyspaceresource" );
        ret.addOption(keyspaceFile);
        ret.addOption(keyspaceResource);
        return ret;
    }

    /**
     * Loads {@link #keyspaceDefinition} from either a file or a classpath
     * resource; exits the process with status 1 on parse failure.
     */
    public void executeCommand(CommandLine cl){
        if(!(cl.hasOption("keyspacefile") || cl.hasOption("keyspaceresource"))){
            displayHelpMessageAndExit();
            return;
        }
        String keyspaceFileName = cl.hasOption("keyspacefile") ? cl.getOptionValue("keyspacefile") : cl.getOptionValue("keyspaceresource");
        //make the keyspace definition
        CKeyspaceDefinition keyDef = null;
        try{
            keyDef = cl.hasOption("keyspacefile") ?
                    JsonUtil.objectFromJsonFile(CKeyspaceDefinition.class,CKeyspaceDefinition.class.getClassLoader(), keyspaceFileName) :
                    JsonUtil.objectFromJsonResource(CKeyspaceDefinition.class,CKeyspaceDefinition.class.getClassLoader(), keyspaceFileName);
        }
        catch (IOException e){
            System.out.println("Could not parse keyspace file "+keyspaceFileName);
            System.exit(1);
        }
        if(keyDef == null){
            System.out.println("Could not parse keyspace file "+keyspaceFileName);
            System.exit(1);
        }
        this.keyspaceDefinition = keyDef;
    }

    /** Prints usage for the concrete command and exits with status 1. */
    public void displayHelpMessageAndExit(){
        HelpFormatter formatter = new HelpFormatter();
        // FIX: replaceAll() interprets its argument as a regex (the dots match any
        // character); replace() performs the intended literal prefix removal.
        String cmdName = this.getClass().getName().replace("com.pardot.rhombus.cli.commands.","");
        formatter.printHelp( "RhombusCli "+cmdName, getCommandOptions());
        System.exit(1);
    }

    /**
     * Dispatches to the command class named by args[0] (unqualified names are
     * resolved under com.pardot.rhombus.cli.commands).
     */
    public static void main( String[] args ) {
        // create the parser
        CommandLineParser parser = new BasicParser();
        try {
            // make sure they gave us a command
            if( args.length == 0) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp( "RhombusCli", makeBootstrapOptions() );
                System.exit(1);
            }
            //Load up the class
            //if the class name is not fully qualified we assume its in com.pardot.rhombus.cli.commands
            String className = args[0];
            if(!className.contains(".")){
                className = "com.pardot.rhombus.cli.commands."+ className;
            }
            try{
                RhombusCommand cmd = (RhombusCommand)(Class.forName(className)).newInstance();
                Options commandOptions = cmd.getCommandOptions();
                cmd.executeCommand(parser.parse( commandOptions, args ));
            }
            catch (ClassNotFoundException e){
                System.out.println("Could not find Command Class "+className);
            }
            catch (IllegalAccessException e){
                System.out.println("Could not access Command Class "+className);
            }
            catch (InstantiationException e){
                System.out.println("Could not instantiate Command Class "+className);
            }
        }
        catch( ParseException exp ) {
            // oops, something went wrong
            System.err.println( "Parsing failed.  Reason: " + exp.getMessage() );
        }
    }
}
|
src/main/java/com/pardot/rhombus/cli/RhombusCli.java
|
package com.pardot.rhombus.cli;
import com.pardot.rhombus.cobject.CKeyspaceDefinition;
import com.pardot.rhombus.util.JsonUtil;
import org.apache.commons.cli.*;
import java.io.IOException;
/**
* User: Rob Righter
* Date: 8/17/13
* Time: 11:06 AM
*/
public class RhombusCli implements RhombusCommand {

    /** Keyspace definition parsed from the -keyspacefile / -keyspaceresource option. */
    public CKeyspaceDefinition keyspaceDefinition;

    /** Options every Rhombus command understands. */
    public static Options makeBootstrapOptions(){
        Options ret = new Options();
        Option help = new Option( "help", "print this message" );
        ret.addOption(help);
        return ret;
    }

    /** Bootstrap options plus the two mutually-alternative keyspace sources. */
    public Options getCommandOptions(){
        Options ret = makeBootstrapOptions();
        Option keyspaceFile = OptionBuilder.withArgName( "filename" )
                .hasArg()
                .withDescription("Filename of json keyspace definition")
                .create( "keyspacefile" );
        // FIX: the option was registered as "keyspaceResource" but looked up as
        // "keyspaceresource" in executeCommand; commons-cli option names are
        // case-sensitive, so the resource flag was never recognized.
        Option keyspaceResource = OptionBuilder.withArgName( "filename" )
                .hasArg()
                .withDescription("Filename of json keyspace definition")
                .create( "keyspaceresource" );
        ret.addOption(keyspaceFile);
        ret.addOption(keyspaceResource);
        return ret;
    }

    /**
     * Loads {@link #keyspaceDefinition} from either a file or a classpath
     * resource; exits the process with status 1 on parse failure.
     */
    public void executeCommand(CommandLine cl){
        if(!(cl.hasOption("keyspacefile") || cl.hasOption("keyspaceresource"))){
            displayHelpMessageAndExit();
            return;
        }
        String keyspaceFileName = cl.hasOption("keyspacefile") ? cl.getOptionValue("keyspacefile") : cl.getOptionValue("keyspaceresource");
        //make the keyspace definition
        CKeyspaceDefinition keyDef = null;
        try{
            keyDef = cl.hasOption("keyspacefile") ?
                    JsonUtil.objectFromJsonFile(CKeyspaceDefinition.class,CKeyspaceDefinition.class.getClassLoader(), keyspaceFileName) :
                    JsonUtil.objectFromJsonResource(CKeyspaceDefinition.class,CKeyspaceDefinition.class.getClassLoader(), keyspaceFileName);
        }
        catch (IOException e){
            System.out.println("Could not parse keyspace file "+keyspaceFileName);
            System.exit(1);
        }
        if(keyDef == null){
            System.out.println("Could not parse keyspace file "+keyspaceFileName);
            System.exit(1);
        }
        this.keyspaceDefinition = keyDef;
    }

    /** Prints usage for the concrete command and exits with status 1. */
    public void displayHelpMessageAndExit(){
        HelpFormatter formatter = new HelpFormatter();
        // FIX: replaceAll() interprets its argument as a regex (the dots match any
        // character); replace() performs the intended literal prefix removal.
        String cmdName = this.getClass().getName().replace("com.pardot.rhombus.cli.commands.","");
        formatter.printHelp( "RhombusCli "+cmdName, getCommandOptions());
        System.exit(1);
    }

    /**
     * Dispatches to the command class named by args[0] (unqualified names are
     * resolved under com.pardot.rhombus.cli.commands).
     */
    public static void main( String[] args ) {
        // create the parser
        CommandLineParser parser = new BasicParser();
        try {
            // make sure they gave us a command
            if( args.length == 0) {
                HelpFormatter formatter = new HelpFormatter();
                formatter.printHelp( "RhombusCli", makeBootstrapOptions() );
                System.exit(1);
            }
            //Load up the class
            //if the class name is not fully qualified we assume its in com.pardot.rhombus.cli.commands
            String className = args[0];
            if(!className.contains(".")){
                className = "com.pardot.rhombus.cli.commands."+ className;
            }
            try{
                RhombusCommand cmd = (RhombusCommand)(Class.forName(className)).newInstance();
                Options commandOptions = cmd.getCommandOptions();
                cmd.executeCommand(parser.parse( commandOptions, args ));
            }
            catch (ClassNotFoundException e){
                System.out.println("Could not find Command Class "+className);
            }
            catch (IllegalAccessException e){
                System.out.println("Could not access Command Class "+className);
            }
            catch (InstantiationException e){
                System.out.println("Could not instantiate Command Class "+className);
            }
        }
        catch( ParseException exp ) {
            // oops, something went wrong
            System.err.println( "Parsing failed.  Reason: " + exp.getMessage() );
        }
    }
}
|
fixed issue wherein embedded keyspace resource was not recognized at the command line
|
src/main/java/com/pardot/rhombus/cli/RhombusCli.java
|
fixed issue wherein embedded keyspace resource was not recognized at the command line
|
|
Java
|
mit
|
error: pathspec 'PoetryBot.java' did not match any file(s) known to git
|
70d639b75fd53988170b72f278ee30b71eef3227
| 1
|
Naftoreiclag/Poetry-Generator
|
import java.io.UnsupportedEncodingException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Scanner;
public class PoetryBot
{
    /** Version number reported to the user on startup. */
    public static int POETRYBOTVERSION = 1;

    /**
     * Interactive loop: asks for a topic, then prints each byte of the topic's
     * SHA-512 hash (seed material for the poem).
     */
    private void run()
    {
        Scanner listener = new Scanner(System.in);
        System.out.println("Hello! I am Poetry Bot.");
        System.out.println("I am poetry bot #" + POETRYBOTVERSION);
        System.out.println();
        System.out.print("What would you like me to write about? ");
        String input = listener.nextLine();
        System.out.println();
        System.out.println("I will write about " + '"' + input + '"' + ".");
        byte[] hash = getSHA512(input);
        for(byte b : hash)
        {
            System.out.println(b);
        }
        listener.close();
    }

    /**
     * Returns the SHA-512 digest of {@code input} encoded as UTF-8.
     *
     * FIX: the original caught NoSuchAlgorithmException, printed the stack
     * trace, and then dereferenced the still-null MessageDigest (NPE). Every
     * conforming JRE must provide SHA-512, so its absence is a fatal state
     * error. Also uses StandardCharsets.UTF_8 instead of getBytes("UTF-8"),
     * eliminating the checked UnsupportedEncodingException entirely.
     * Made package-private and static so it can be unit-tested.
     */
    static byte[] getSHA512(String input)
    {
        try
        {
            MessageDigest sHAer = MessageDigest.getInstance("SHA-512");
            return sHAer.digest(input.getBytes(java.nio.charset.StandardCharsets.UTF_8));
        }
        catch (NoSuchAlgorithmException e)
        {
            throw new IllegalStateException("SHA-512 is required to be available", e);
        }
    }

    public static void main(String[] args)
    {
        PoetryBot pb = new PoetryBot();
        pb.run();
    }
}
|
PoetryBot.java
|
Added source code
|
PoetryBot.java
|
Added source code
|
|
Java
|
mit
|
error: pathspec 'GuiAbbotCostello.java' did not match any file(s) known to git
|
029db49a4043c1211d9b8d90bc8b19a03b0b2542
| 1
|
mattdelashaw/JAVA-PERL
|
import javax.swing.JOptionPane;
class GuiAbbotCostello {
    /**
     * Asks which player is on each of the three bases via Swing dialogs, then
     * shows a summary dialog (Abbott &amp; Costello "Who's on First?" gag).
     */
    public static void main(String[] args) {
        String[] bases = new String[3];
        Object[] playerNames = {"Who", "What", "I Don\'t Know"};
        // FIX: removed a dead "new Object()" allocation that was immediately
        // overwritten, and use the same (String) cast for all three prompts.
        // Casting (instead of .toString()) also avoids an NPE when the user
        // cancels the first dialog (showInputDialog returns null on cancel).
        bases[0] = (String)JOptionPane.showInputDialog(null,"1st baseman?","1st Baseman",JOptionPane.PLAIN_MESSAGE, null, playerNames, playerNames[0]);
        bases[1] = (String)JOptionPane.showInputDialog(null, "2nd baseman?","2nd Baseman", JOptionPane.PLAIN_MESSAGE, null, playerNames, playerNames[1]);
        bases[2] = (String)JOptionPane.showInputDialog(null,"3rd baseman?", "3rd Baseman", JOptionPane.PLAIN_MESSAGE, null, playerNames, playerNames[2]);
        JOptionPane.showMessageDialog(null,""+bases[0]+" is on first.\n" +bases[1]+" is on second.\n"+bases[2]+" is on third.","Basemen",JOptionPane.PLAIN_MESSAGE);
    }
}
|
GuiAbbotCostello.java
|
Create GuiAbbotCostello.java
|
GuiAbbotCostello.java
|
Create GuiAbbotCostello.java
|
|
Java
|
mit
|
error: pathspec 'TemplateMethod/Java/Main.java' did not match any file(s) known to git
|
86db4ea0a578e17225e0a2d82eeff1749d1537fc
| 1
|
wrymax/design-pattern-examples
|
// Abstract template class for checking car quality.
abstract class AbstractQualityChecker {
    /* Primitive check steps, to be overridden by subclasses. */
    // Check startup behaviour.
    abstract void startup();
    // Check acceleration behaviour.
    abstract void speedup();
    // Check braking behaviour.
    abstract void brake();
    // Check stopping behaviour.
    abstract void stop();
    // Template method: fixes the check sequence; declared final so
    // subclasses cannot override (and thus reorder) it.
    public final void checkQuality() {
        startup();
        speedup();
        brake();
        stop();
        System.out.println("--- 检测完成!---\n");
    }
}
// Concrete implementation 1: quality checks for the Porsche 911.
class QualityChecker911 extends AbstractQualityChecker {
    // Startup check.
    void startup() {
        System.out.println("检测保时捷911的启动性能...");
    }
    // Acceleration check.
    void speedup() {
        System.out.println("检测保时捷911的加速性能...");
    }
    // Braking check.
    void brake() {
        System.out.println("检测保时捷911的制动性能...");
    }
    // Stopping check.
    void stop() {
        System.out.println("检测保时捷911的停止性能...");
    }
}
// Concrete implementation 2: quality checks for the Porsche Cayma.
class QualityCheckerCayma extends AbstractQualityChecker {
    // Startup check.
    void startup() {
        System.out.println("检测保时捷Cayma的启动性能...");
    }
    // Acceleration check.
    void speedup() {
        System.out.println("检测保时捷Cayma的加速性能...");
    }
    // Braking check.
    void brake() {
        System.out.println("检测保时捷Cayma的制动性能...");
    }
    // Stopping check.
    void stop() {
        System.out.println("检测保时捷Cayma的停止性能...");
    }
}
public class Main {
    /**
     * Demo entry point: runs the fixed quality-check template
     * on two concrete checkers.
     */
    public static void main(String[] args) {
        QualityChecker911 porscheChecker = new QualityChecker911();
        QualityCheckerCayma caymaChecker = new QualityCheckerCayma();
        porscheChecker.checkQuality();
        caymaChecker.checkQuality();
    }
}
|
TemplateMethod/Java/Main.java
|
add TemplateMethod pattern
|
TemplateMethod/Java/Main.java
|
add TemplateMethod pattern
|
|
Java
|
mit
|
error: pathspec 'java/src/main/java/Lotto.java' did not match any file(s) known to git
|
73196c869572d19279c8d9b1fd856a4de4787c1c
| 1
|
funfunStudy/algorithm,funfunStudy/algorithm,funfunStudy/algorithm,funfunStudy/algorithm,funfunStudy/algorithm
|
import java.util.*;
import java.util.stream.Collectors;
public class Lotto {

    /**
     * Reads one problem per line ("k n1 n2 ... nk") until a line equal to "0",
     * printing all 6-number combinations for each problem, separated by a
     * blank line.
     */
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        String currentLine = sc.nextLine();
        while (!currentLine.equals("0")) {
            printLotto(currentLine);
            currentLine = sc.nextLine();
            System.out.println();
        }
    }

    /** Parses "k n1 ... nk" and prints each 6-number combination on its own line. */
    private static void printLotto(String currentLine) {
        List<Integer> inputs = Arrays.stream(currentLine.split(" "))
                .map(Integer::valueOf)
                .collect(Collectors.toList());
        int k = inputs.get(0);
        List<Integer> tailList = inputs.subList(1, inputs.size());
        // FIX: the original accumulated into a static field that was never
        // cleared, so combinations from earlier input lines leaked into the
        // output of every later line. A fresh list is now built per call.
        for (String row : combinations(k, tailList)) {
            System.out.println(row);
        }
    }

    /**
     * Returns every distinct 6-element combination of {@code numbers} as a
     * space-joined string, in ascending lexicographic order.
     * Package-private so it can be unit-tested.
     */
    static List<String> combinations(int k, List<Integer> numbers) {
        List<List<Integer>> acc = new ArrayList<>();
        collect(k, numbers, acc);
        return acc.stream()
                .sorted(getLottoComparator())
                .map(result -> result.stream().map(String::valueOf).collect(Collectors.joining(" ")))
                .distinct()
                .collect(Collectors.toList());
    }

    /** Recursively removes elements until 6 remain, collecting each outcome into {@code acc}. */
    private static void collect(int k, List<Integer> list, List<List<Integer>> acc) {
        if (list.size() == 6) {
            acc.add(list);
        } else {
            for (int i = 0; i < k; i++) {
                List<Integer> subList = new ArrayList<>(list);
                subList.remove(i);
                collect(k - 1, subList, acc);
            }
        }
    }

    /** Elementwise ascending comparison of two 6-number combinations. */
    private static Comparator<List<Integer>> getLottoComparator() {
        return (s1, s2) -> {
            for (int i = 0; i < 6; i++) {
                if (s1.get(i) > s2.get(i)) {
                    return 1;
                } else if (s1.get(i) < s2.get(i)) {
                    return -1;
                }
            }
            return 0;
        };
    }
}
|
java/src/main/java/Lotto.java
|
Lotto Problems solved by Java
|
java/src/main/java/Lotto.java
|
Lotto Problems solved by Java
|
|
Java
|
mit
|
error: pathspec 'mfr-compat-magical-crops.java' did not match any file(s) known to git
|
40f2a789e7b73c7b6f33297fc2ea76787c9f1d3b
| 1
|
mechaet/mfr-compat-magical-crops
|
package mechaet.mfr.compat.magicalcrops;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.logging.Level;
import java.util.logging.Logger;

import net.minecraft.block.Block;
import net.minecraft.item.Item;
import net.minecraft.world.World;
import net.minecraftforge.common.IPlantable;

import powercrystals.minefactoryreloaded.MFRRegistry;
import powercrystals.minefactoryreloaded.MineFactoryReloadedCore;
import powercrystals.minefactoryreloaded.api.HarvestType;
import powercrystals.minefactoryreloaded.farmables.fertilizables.FertilizableSapling;
import powercrystals.minefactoryreloaded.farmables.harvestables.HarvestableCropPlant;
import powercrystals.minefactoryreloaded.farmables.harvestables.HarvestableStandard;
import powercrystals.minefactoryreloaded.farmables.harvestables.HarvestableTreeLeaves;
import powercrystals.minefactoryreloaded.farmables.plantables.PlantableStandard;

import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.Loader;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.Mod.Instance;
import cpw.mods.fml.common.event.FMLInitializationEvent;
import cpw.mods.fml.common.event.FMLPostInitializationEvent;
import cpw.mods.fml.common.event.FMLPreInitializationEvent;
import cpw.mods.fml.common.network.NetworkMod;
import cpw.mods.fml.common.registry.GameData;
import cpw.mods.fml.common.registry.GameRegistry;
@Mod(modid = "MineFactoryReloaded|CompatMagicalCrops", name = "MFR Compat: MagicalCrops", version = 0.1a,
dependencies = "after:MineFactoryReloaded;after:magicalcrops")
@NetworkMod(clientSideRequired = false, serverSideRequired = false)
public class MagicalCropsMFRCompat
{
@Instance(value = "MFRCompatMagicalCrops")
public static MagicalCropsMFRCompat instance;
private static final Logger _log;
static {
//Init the logger
_log = Logger.getLogger("mechaet.mfr.compat.magicalcrops.MagicalCropsMFRCompat");
_log.setParent(FMLLog.getLogger());
}
@EventHandler
public void preInit(FMLPreInitializationEvent event)
{
instance = this;
}
private void registerMfr() throws Exception
{
//I enumerate the materials to increase the chances of working with future versions
//but at the cost of not automatically working with new plants.
//Registering materials
String[] mMaterials =
{
"Alumin",
"Blaze",
"Coal",
"Copper",
"Diamond",
"Dye",
"Emerald",
"Ender",
"Glowstone",
"Gold",
"Iron",
"Lapis",
"Lead",
"Nether",
"Obsidian",
"Peridot",
"Quartz",
"Redstone",
"Ruby",
"Sapphire",
"Silver",
"Tin",
};
registerMfr(mMaterials, "m");
//Registering elements
String[] eMaterials =
{
"Water",
"Fire",
"Earth",
"Air",
};
registerMfr(eMaterials, "e");
//Registering your soul
String[] soulMaterials =
{
"Cow",
"Creeper",
"Skeleton",
"Slime",
"Skeleton",
"Spider",
"Ghast",
};
registerMfr(soulMaterials, "soul");
}
private void registerMfr(String[] materials, String prefix) throws Exception
{
Constructor plantableconstructor = null;
Constructor harvestconstructor = null;
//Yes, it is wasteful to do this reflection once for each type, but it should only really
//add miliseconds.
try {
//Planter
Class plantableClass = Class.forName("powercrystals.minefactoryreloaded.farmables.plantables.PlantableCropPlant");
Constructor[] plantableAllConstructors = plantableClass.getDeclaredConstructors();
for (Constructor ctor : plantableAllConstructors) {
Class<?>[] pType = ctor.getParameterTypes();
if( pType.length != 2 || pType[0] != int.class || pType[1] != int.class )
{
continue;
}
plantableconstructor = ctor;
}
//Harvester
Class harvestClass = Class.forName("powercrystals.minefactoryreloaded.farmables.harvestables.HarvestableCropPlant");
Constructor[] harvestAllConstructors = harvestClass.getDeclaredConstructors();
for (Constructor ctor : harvestAllConstructors) {
Class<?>[] pType = ctor.getParameterTypes();
if( pType.length != 2 || pType[0] != int.class || pType[1] != int.class )
{
continue;
}
harvestconstructor = ctor;
}
} catch (ClassNotFoundException e) {
throw new RuntimeException("Failed to init mfr Planter/Harvester classes",e);
}
//Base class of the MC mod
Class cropsModClass = Class.forName("magicalcrops.mod_mCrops");
//Loop through all the crops that were passed in and add them to the list.
for( String material : materials )
{
//Shared
String cropName = prefix+"Crop"+material;
String seedName = prefix.charAt(0)+"Seeds"+material;
Block crop = (Block)cropsModClass.getField(cropName).get(null);
Item seed = (Item) cropsModClass.getField(seedName).get(null);
if( crop == null || seed == null )
{
_log.warning("Unable to find crop "+cropName+" by reflection. Maybe that crop is disabled or something?");
continue;
}
//Planter
if( !(seed instanceof IPlantable) )
{
throw new IllegalArgumentException("Seed "+seed.getUnlocalizedName()+" is not IPlantable. Type is "+seed.getClass());
};
//Using MFR code to register for Planter
MFRRegistry.registerPlantable(new PlantableStandard(seed.itemID, crop.blockID));
//IFactoryPlantable plantable = (IFactoryPlantable) plantableconstructor.newInstance(seed.itemID, crop.blockID);
//FactoryRegistry.registerPlantable(plantable);
////Using MFR code to register for Harvester
MFRRegistry.registerHarvestable(new HarvestableCropPlant(crop.blockID,7));
//IFactoryHarvestable harvestable = (IFactoryHarvestable) harvestconstructor.newInstance(crop.blockID,7);
//FactoryRegistry.registerHarvestable(harvestable);
_log.finer("Registered crop "+cropName+" with the MFR planter and harvester.");
}
}
public static void postInit(FMLPostInitializationEvent e)
{
if(!Loader.isModLoaded("magicalcrops"))
{
FMLLog.warning("magicalcrops missing - MFR Magical Crops Compat not loading");
return;
}
try
{
_log.log(Level.INFO, "Loading Magical Crops for MFR");
//Doing this in post-init to minimize problems with Magical Crops
//not having finished init yet.
registerMfr();
}
catch (Exception ex)
{
ex.printStackTrace();
}
}
}
|
mfr-compat-magical-crops.java
|
Added new compat file
This is the compatibility file for hooking together MFR and Magical
Crops.
I borrowed the code for reflection from PatMo98, and instituted MFR
registrars for the crops.
|
mfr-compat-magical-crops.java
|
Added new compat file
|
|
Java
|
mit
|
error: pathspec 'src/player/OptimalMultiPlayerAI.java' did not match any file(s) known to git
|
816cd26b87de8ecf1b80db605d00a0150c95d97a
| 1
|
rasmusgreve/yahtzee
|
package player;
import java.util.Arrays;
import util.Persistence;
import util.YahtzeeMath;
import game.Answer;
import game.GameLogic;
import game.Question;
import game.Scoreboard;
import game.Scoreboard.ScoreType;
/**
 * Work-in-progress Yahtzee player intended to play optimally in a multi-player
 * game by looking up/memoizing the expected value of each scoreboard state.
 *
 * NOTE(review): {@code boardValues} is never allocated or loaded in this class
 * (only stored on {@link #cleanUp()}); presumably the caller loads it from
 * {@code filename} via {@code Persistence} before play — TODO confirm,
 * otherwise {@link #getBoardValue(int)} throws a NullPointerException.
 */
public class OptimalMultiPlayerAI implements Player {
	// Player id assigned via reset(int).
	protected int id;
	// Memoized expected value per scoreboard state; -1 marks "not computed yet".
	public double[] boardValues;
	public static final String filename = "optimalPlayerCache.bin";
	public OptimalMultiPlayerAI(){
	}
	/**
	 * Answers the game's question: with no rolls left, picks the score entry to
	 * fill; otherwise picks which dice to hold for the next roll.
	 */
	@Override
	public Answer PerformTurn(Question question) {
		Answer ans = new Answer();
		if (question.rollsLeft == 0)
			ans.selectedScoreEntry = getBestScoreEntry(question.roll, question.scoreboards[question.playerId].ConvertMapToInt());
		else
			ans.diceToHold = getBestHold(question.roll, question.rollsLeft, question.scoreboards[question.playerId].ConvertMapToInt());
		return ans;
	}
	/**
	 * Chooses the unfilled score entry maximizing immediate roll value plus the
	 * expected value of the resulting board state.
	 */
	private ScoreType getBestScoreEntry(int[] roll, int board){
		int rollC = YahtzeeMath.colex(roll);
		int best = -1;
		double max = Double.NEGATIVE_INFINITY;
		for (int type = 0; type < ScoreType.count; type++) {
			if (Scoreboard.isFilled(board, type)) continue; //Skip filled entries
			int value_of_roll = GameLogic.valueOfRoll(type, rollC);
			int new_board = Scoreboard.fill(board, type, value_of_roll);
			double newVal = getBoardValue(new_board) + value_of_roll;
			if (newVal > max){
				max = newVal;
				best = type;
			}
		}
		return ScoreType.values()[best];
	}
	/**
	 * Lazily computes (and memoizes) the expected value of a board state:
	 * a full board is worth its bonus; otherwise the value of rolling from it.
	 */
	public double getBoardValue(int board) {
		if (boardValues[board] == -1) {
			if (Scoreboard.isFull(board))
			{
				boardValues[board] = Scoreboard.bonus(board);
			}
			else
			{
				boardValues[board] = rollFromScoreboard(board);
			}
		}
		return boardValues[board];
	}
	// STUB: hold strategy not implemented yet — always returns null,
	// so Answer.diceToHold will be null when rolls remain.
	private boolean[] getBestHold(int[] roll, int rollsLeft, int board){
		return null;
	}
	// STUB: expected value of rolling from a board state; the intended
	// implementation is sketched in the commented-out code. Returns 0 for now.
	private double rollFromScoreboard(int board) {
//		double s = 0;
//		double[] cache = newRollValuesCache();
//		for (int i = 0; i < YahtzeeMath.allRolls.length; i++) {
//			double v = valueOfRoll(YahtzeeMath.colex(YahtzeeMath.allRolls[i]), 2, board, cache);
//			s += v * YahtzeeMath.prob(5,YahtzeeMath.allRolls[i]);
//		}
//		return s;
		return 0;
	}
	@Override
	public String getName() {
		return "Optimal multi player AI";
	}
	@Override
	public void reset(int id) {
		this.id = id;
	}
	// Persists the memoized board values to disk when the game ends.
	@Override
	public void cleanUp() {
		Persistence.storeArray(boardValues, filename);
	}
}
|
src/player/OptimalMultiPlayerAI.java
|
Started optimal MultiPlayerAI
|
src/player/OptimalMultiPlayerAI.java
|
Started optimal MultiPlayerAI
|
|
Java
|
mit
|
error: pathspec 'java/com/htmlspeed/server/FlashRsrc.java' did not match any file(s) known to git
|
8c03f1405fbf959da8c800b44a2c9ae1925c315e
| 1
|
TimelyPick/HtmlSpeed
|
/**
* Copyright 2011 Galiel 3.14 Ltd. All rights reserved.
* Use is subject to license terms.
*
* Created on 17 December 2012
*/
package com.htmlspeed.server;
/**
 * FlashRsrc.
 *
 * A swf file (flash presentation).
 * Used for caching flash files.
 *
 * Adds no state or behavior of its own — presumably a marker subclass so the
 * caching layer can distinguish flash resources from other Rsrc types
 * (NOTE(review): confirm against the code that instantiates it).
 *
 * @author Eldad Zamler
 * @version $Revision: 1.2 $$Date: 2012/12/17 06:33:17 $
 */
public class FlashRsrc extends Rsrc
{
}
|
java/com/htmlspeed/server/FlashRsrc.java
|
Create FlashRsrc.java
|
java/com/htmlspeed/server/FlashRsrc.java
|
Create FlashRsrc.java
|
|
Java
|
mit
|
error: pathspec 'src/model/supervised/linearmodel/LinearGradientDecent.java' did not match any file(s) known to git
|
79c1bc04655e1119d6730864a857e9da93ac12f6
| 1
|
heroxdream/MachineLearning
|
package model.supervised.linearmodel;
import algorithms.gradient.Decent;
import algorithms.gradient.DecentType;
import algorithms.gradient.GradientDecent;
import algorithms.gradient.NewtonDecent;
import com.google.common.util.concurrent.AtomicDouble;
import data.DataSet;
import model.Predictable;
import model.Trainable;
import org.apache.commons.math3.linear.MatrixUtils;
import org.apache.commons.math3.linear.RealMatrix;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.Arrays;
import java.util.stream.IntStream;
/**
* Created by hanxuan on 9/30/15 for machine_learning.
*/
/**
 * Linear regression trained by (mini-batch) gradient descent with optional
 * L2 regularization (LAMBDA). Newton descent is declared but not implemented.
 */
public class LinearGradientDecent implements Predictable, Trainable, Decent, GradientDecent , NewtonDecent{

    public static double ALPHA = 0.01; // learning rate
    public static double LAMBDA = 0.0; // punish (L2 regularization) rate
    public static int BUCKET_COUNT = 1; // mini batch count

    private static final Logger log = LogManager.getLogger(LinearGradientDecent.class);

    protected DecentType type = DecentType.GRADIENT;

    protected DataSet data = null;

    // Learned weights as a column vector; set by train().
    protected RealMatrix w = null;

    public LinearGradientDecent() {}

    /** Predicts the real-valued response for one feature vector: x * w. */
    @Override
    public double predict(double[] feature) {
        RealMatrix featureMatrix = MatrixUtils.createRowRealMatrix(feature);
        return featureMatrix.multiply(w).getEntry(0, 0);
//        return featureMatrix.multiply(w).getEntry(0, 0) > 0.5 ? 1 : 0;
    }

    /** Runs the descent loop from a zero initial theta and stores the weights. */
    @Override
    public void train() {
        double[] initTheta = new double[data.getFeatureLength()];
        double finalCost = loop(data, BUCKET_COUNT, initTheta);
        log.info("Training finished, final cost: {}", finalCost);
        w = MatrixUtils.createColumnRealMatrix(initTheta);
    }

    @Override
    public Predictable offer() {
        return this;
    }

    @Override
    public void initialize(DataSet d) {
        this.data = d;
    }

    /**
     * One gradient step over instances [start, end): computes the averaged
     * gradient, adds the L2 term (skipping the bias at index 0), and updates
     * theta in place.
     *
     * BUG FIX: the original accumulated into the shared g[i] from a parallel
     * forEach ("g[i] += ..."), which is a data race and produced
     * nondeterministic gradients. Each component is now computed as a
     * side-effect-free parallel sum.
     */
    @Override
    public double[] gGradient(DataSet data, int start, int end, double[] theta) {
        double[] g = new double[theta.length];
        IntStream.range(0, g.length).forEach(
                i -> g[i] = IntStream.range(start, end).parallel().mapToDouble(
                        j -> {
                            double[] X = data.getInstance(j);
                            return (hypothesis(X, theta) - data.getLabel(j)) * X[i] / (end - start);
                        }
                ).sum()
        );

        log.debug("theta: {}", theta);
        log.debug("g : {}", g);

        // Regularize all weights except the bias term at index 0.
        IntStream.range(1, g.length).forEach(i -> g[i] += LAMBDA * theta[i] / (end - start));
        IntStream.range(0, theta.length).forEach(i -> theta[i] -= ALPHA * g[i]);
        return theta;
    }

    /**
     * Mean squared error over the whole data set (halved, per convention)
     * plus the L2 penalty. AtomicDouble makes the parallel accumulation safe.
     */
    @Override
    public double cost(DataSet data, double[] theta) {

        int instanceLength = data.getInstanceLength();
        AtomicDouble cost = new AtomicDouble(0);
        IntStream.range(0, instanceLength).parallel().forEach(
                i -> cost.getAndAdd(
                        Math.pow((data.getLabel(i) - hypothesis(data.getInstance(i), theta)), 2)
                )
        );
        cost.getAndSet(cost.doubleValue() / (double) instanceLength / 2.0);
        double punish = LAMBDA * Arrays.stream(theta).map(x -> Math.pow(x, 2)).sum() / 2 / instanceLength;
        cost.getAndAdd(punish);
        return cost.doubleValue();
    }

    // Dispatches on the configured descent type; Newton descent is not implemented.
    @Override
    public void parameterGradient(DataSet data, int start, int end, double[] theta) {
        if (type == DecentType.GRADIENT) {
            gGradient(data, start, end, theta);
        }else {
        }
    }

    // Linear hypothesis: dot product of the feature vector and theta.
    private double hypothesis(double[] x, double[] theta) {
        return IntStream.range(0, x.length).mapToDouble(i -> x[i] * theta[i]).sum();
    }

    public static void main(String[] args) {
        LinearGradientDecent lm = new LinearGradientDecent();
        System.out.println(lm.hypothesis(new double[]{1, 2}, new double[]{3, 4}));
    }

    // STUB: Newton descent gradient — not implemented, returns an empty array.
    @Override
    public double[] nGradient(DataSet data, int start, int end, double[] theta) {
        return new double[0];
    }
}
|
src/model/supervised/linearmodel/LinearGradientDecent.java
|
linear regression
|
src/model/supervised/linearmodel/LinearGradientDecent.java
|
linear regression
|
|
Java
|
mit
|
error: pathspec 'forest-core/src/test/java/com/dtflys/test/misc/URLTest.java' did not match any file(s) known to git
|
ce524dc3e94b9800c6bc5daf2a76d9d34fe02151
| 1
|
mySingleLive/forest
|
package com.dtflys.test.misc;
import com.dtflys.forest.utils.URLUtils;
import junit.framework.TestCase;
/** Exercises URLUtils.getValidURL over base/path joining and absolute overrides. */
public class URLTest extends TestCase {

    // Each row is { expected result, base URL, relative path }.
    private static final String[][] CASES = {
            {"http://www.baidu.com",     "http://www.baidu.com",  ""},
            {"http://www.baidu.com/xxx", "http://www.baidu.com",  "xxx"},
            {"http://www.baidu.com/xxx", "http://www.baidu.com/", "xxx"},
            {"http://www.baidu.com/xxx", "http://www.baidu.com/", "/xxx"},
            {"http://www.baidu.com/xxx", "http://www.baidu.com",  "/xxx"},

            {"www.baidu.com",     "www.baidu.com",  ""},
            {"www.baidu.com/xxx", "www.baidu.com",  "xxx"},
            {"www.baidu.com/xxx", "www.baidu.com/", "xxx"},
            {"www.baidu.com/xxx", "www.baidu.com/", "/xxx"},
            {"www.baidu.com/xxx", "www.baidu.com",  "/xxx"},

            // An absolute path wins over any base.
            {"http://www.baidu.com", "http://www.baidu.com", "http://www.baidu.com"},
            {"http://www.baidu.com", "www.baidu.com",        "http://www.baidu.com"},
            {"http://www.baidu.com", "google.com",           "http://www.baidu.com"},
            {"http://www.baidu.com", "xxx",                  "http://www.baidu.com"},
    };

    public void testUrl() {
        for (String[] c : CASES) {
            assertEquals(c[0], URLUtils.getValidURL(c[1], c[2]));
        }
    }
}
|
forest-core/src/test/java/com/dtflys/test/misc/URLTest.java
|
test: baseUrl会自动删去 / 的问题
|
forest-core/src/test/java/com/dtflys/test/misc/URLTest.java
|
test: baseUrl会自动删去 / 的问题
|
|
Java
|
mit
|
error: pathspec 'src/hu/laszlolukacs/spacesiegebreakers/scenes/GameScene.java' did not match any file(s) known to git
|
d09527713ef5484410f7698399e0e63f358837ba
| 1
|
laszlolukacs/spacesiegebreakers
|
package hu.laszlolukacs.spacesiegebreakers.scenes;
import java.io.IOException;
import javax.microedition.lcdui.Font;
import javax.microedition.lcdui.Graphics;
import javax.microedition.lcdui.Image;
import javax.microedition.lcdui.game.GameCanvas;
import javax.microedition.lcdui.game.LayerManager;
import javax.microedition.lcdui.game.Sprite;
import javax.microedition.lcdui.game.TiledLayer;
import javax.microedition.midlet.MIDletStateChangeException;
import hu.laszlolukacs.spacesiegebreakers.SpaceSiegeBreakersMIDlet;
import hu.laszlolukacs.spacesiegebreakers.utils.Log;
/**
 * Main gameplay screen: renders the tower-defense map, HUD and sprites, and
 * handles the control-bar input. Most of the actual game logic (waves,
 * building, minion movement) is still commented-out scaffolding from the
 * original MIDP implementation.
 */
public class GameScene extends GameCanvas implements Scene {

	public static final String TAG = "GameScene";

	private SpaceSiegeBreakersMIDlet midlet;
	private Graphics g;

	// Timestamp of the last accepted key press, used to debounce input.
	private long m_timeButtonLastPressed = 0;

	private int m_screenWidth, m_screenHeight, m_centerHorizontal,
			m_centerVertical, m_cornerX, m_cornerY;

	// graphics assets (arrays are sized once here; init() only fills them —
	// the original allocated them a second time in init())
	private LayerManager layMan_Game;
	private LayerManager layMan_UI;
	private TiledLayer tiled_Map;
	private Sprite spr_Placeholder;
	private Sprite[] spr_FX = new Sprite[20];
	private Sprite[] spr_Minions = new Sprite[20];
	private Sprite[] spr_Turrets = new Sprite[108];
	private Sprite[] spr_UI_icons = new Sprite[3];
	private Sprite[] spr_UI_menu = new Sprite[2];
	private Sprite[] spr_UI_controls = new Sprite[3];

	private Image img_Background;
	private Image img_MinionDeathFX;
	private Image img_Map;
	private Image img_Minion;
	private Image img_Placeholder;
	private Image img_Turret;
	private Image ui_Header;
	private Image ui_HUDicons;
	private Image ui_HUDcontrols;
	private Image ui_Tray;

	// UI descriptors
	private boolean isUIControlsMoving;
	private boolean isUIControlsMovingToLeft;
	private int ui_CurrentMessageIndex;
	private int ui_ControlSelectedIndex;
	private int ui_SliderCounter;
	private String sz_Credits, sz_Lives, sz_Score;
	private String[] sz_CurrentInfo;

	// 16x17 tile indices into img_Map (0 = empty).
	private int[] g_map = new int[] {
			0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0,
			0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0,
			0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 0, 0,
			0, 0, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 0, 0,
			0, 0, 1, 2, 9,10,10,10,10,10,10,11, 1, 2, 0, 0,
			0, 0, 3, 4,16, 0, 0, 0, 0, 0, 0,12, 3, 4, 0, 0,
			0, 0, 1, 2,16, 0, 7, 8, 7, 8, 0,12, 1, 2, 0, 0,
			0, 0,19,18,17, 0, 5, 6, 5, 6, 0,12, 3, 4, 0, 0,
			0, 0, 0, 0, 0, 0, 7, 8, 7, 8, 0,12, 1, 2, 0, 0,
			5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 0,12, 3, 4, 0, 0,
			7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 0,12, 1, 2, 0, 0,
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,12, 3, 4, 0, 0,
			0, 0,13,14,14,14,14,14,14,14,14,15, 1, 2, 0, 0,
			0, 0, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 0, 0,
			0, 0, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 0, 0,
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
			0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
	};

	/** Caches screen geometry; the 160x180 play field is centered on the LCD. */
	public GameScene(SpaceSiegeBreakersMIDlet midlet) {
		super(true);
		this.midlet = midlet;
		this.g = super.getGraphics();
		this.m_screenWidth = super.getWidth(); // width of the LCD
		this.m_screenHeight = super.getHeight(); // height of the LCD
		this.m_centerHorizontal = m_screenWidth / 2; // center of the LCD
		this.m_centerVertical = m_screenHeight / 2;
		this.m_cornerX = (m_screenWidth - 160) / 2; // corner of the Game Canvas
		this.m_cornerY = (m_screenHeight - 180) / 2;
	}

	/**
	 * Loads all images, builds the sprites/tiled map and assembles the two
	 * layer managers (game world and HUD). Must run before update()/render().
	 */
	public void init() {
		Log.i(TAG, "Loading scene resources... ");
		try {
			this.img_Background = Image.createImage("/legacy/abs_bkg.png");
			this.img_MinionDeathFX = Image.createImage("/legacy/fx.png");
			this.img_Map = Image.createImage("/legacy/map_tiles.png");
			this.img_Minion = Image.createImage("/legacy/minions.png");
			this.img_Placeholder = Image.createImage("/legacy/placeholder.png");
			this.img_Turret = Image.createImage("/legacy/turret.png");
			this.ui_Header = Image.createImage("/legacy/ui_header.png");
			this.ui_HUDicons = Image.createImage("/legacy/ui_icons.png");
			this.ui_HUDcontrols = Image.createImage("/legacy/ui_controls.png");
			this.ui_Tray = Image.createImage("/legacy/ui_tray.png");

			// creates sprites and map
			Log.i(TAG, "Creating sprites...");

			// creates the layer manager instance
			this.layMan_Game = new LayerManager();

			// the turret place picker square
			this.spr_Placeholder = new Sprite(img_Placeholder, 10, 10);
			this.spr_Placeholder.defineReferencePixel(5, 5);
			this.spr_Placeholder.setVisible(false);
			this.layMan_Game.append(spr_Placeholder);

			// creates minion death effect animation sprites (hidden until used)
			for (int i = 0; i < 20; i++) {
				this.spr_FX[i] = new Sprite(this.img_MinionDeathFX, 20, 20);
				this.spr_FX[i].defineReferencePixel(10, 10);
				this.spr_FX[i].setVisible(false);
				this.layMan_Game.append(spr_FX[i]);
			}

			// creates turret sprites (hidden until built)
			for (int i = 0; i < 108; i++) {
				this.spr_Turrets[i] = new Sprite(this.img_Turret, 10, 10);
				this.spr_Turrets[i].setVisible(false);
				this.layMan_Game.append(this.spr_Turrets[i]);
			}

			// creates map from the int array
			this.tiled_Map = new TiledLayer(16, 17, this.img_Map, 10, 10);
			for (int i = 0; i < g_map.length; i++) {
				int column = i % 16;
				int row = (i - column) / 16;
				this.tiled_Map.setCell(column, row, g_map[i]);
			}
			this.layMan_Game.append(this.tiled_Map);

			// sets up the in-game user interface (HUD)
			this.layMan_UI = new LayerManager();

			// creates the status icons
			for (int i = 0; i < 3; i++) {
				this.spr_UI_icons[i] = new Sprite(this.ui_HUDicons, 16, 16);
				this.layMan_UI.append(this.spr_UI_icons[i]);
			}

			// places and sets the status icons (frame index selects the icon)
			this.spr_UI_icons[1].setPosition(this.m_screenWidth - 36, 2);
			this.spr_UI_icons[0].setPosition(this.spr_UI_icons[1].getX() - 56,
					2);
			this.spr_UI_icons[2].setPosition(2, 2);
			this.spr_UI_icons[1].nextFrame();
			this.spr_UI_icons[2].nextFrame();
			this.spr_UI_icons[2].nextFrame();

			// creates control icons
			for (int i = 0; i < 3; i++) {
				this.spr_UI_controls[i] = new Sprite(this.ui_HUDcontrols, 32,
						32);
				this.layMan_UI.append(this.spr_UI_controls[i]);
			}

			// places and sets the control icons (frame index selects the glyph
			// and its selected/unselected state)
			this.spr_UI_controls[0].setPosition(m_centerHorizontal - 16,
					m_screenHeight - 32);
			this.spr_UI_controls[1].setPosition(m_centerHorizontal + 18,
					m_screenHeight - 32);
			this.spr_UI_controls[2].setPosition(m_centerHorizontal + 52,
					m_screenHeight - 32);
			this.spr_UI_controls[0].nextFrame();
			this.spr_UI_controls[1].nextFrame();
			this.spr_UI_controls[1].nextFrame();
			for (int i = 0; i < 4; i++) {
				this.spr_UI_controls[2].nextFrame();
			}

			// initial HUD values and the status-line message table
			// (the original wrapped each literal in a redundant "new String(...)")
			sz_Credits = "40";
			sz_Lives = "20";
			sz_Score = "0";

			sz_CurrentInfo = new String[7];
			sz_CurrentInfo[0] = "Place new turret";
			sz_CurrentInfo[1] = "Call for a wave";
			sz_CurrentInfo[2] = "Retreat!";
			sz_CurrentInfo[3] = "Pick a location";
			sz_CurrentInfo[4] = "Not enough credits.";
			sz_CurrentInfo[5] = "Wave completed!";
			sz_CurrentInfo[6] = "WARNING! Defeat imminent";

			if(Log.getEnabled()) {
				Log.i(TAG, "Load complete.");
			}
		} catch (IOException ioex) {
			if (Log.getEnabled()) {
				Log.e(TAG, "Failed to load the resources, reason: "
						+ ioex.getMessage());
				ioex.printStackTrace();
			}
		}
	}

	/** Per-frame logic tick; currently only polls the input. */
	public void update() {
		this.getInput();
	}

	/** Draws background, HUD text, game layers and UI layers, then flushes. */
	public void render() {
		// draws the starscape background
		this.g.drawImage(img_Background, m_centerHorizontal, m_centerVertical,
				Graphics.VCENTER | Graphics.HCENTER);

		// draws the HUD
		g.drawImage(ui_Header, 0, 0, Graphics.TOP | Graphics.LEFT);
		g.drawImage(ui_Tray, 0, m_screenHeight, Graphics.BOTTOM | Graphics.LEFT);

		this.g.setColor(255, 255, 255);
		this.g.setFont(Font.getFont(Font.FACE_PROPORTIONAL, Font.STYLE_BOLD,
				Font.SIZE_LARGE));
		g.drawString(sz_Credits, m_screenWidth - 38, 0,
				Graphics.TOP | Graphics.RIGHT);
		g.drawString(sz_Lives, m_screenWidth - 2, 0,
				Graphics.TOP | Graphics.RIGHT);
		g.drawString(sz_Score, 24, 0, Graphics.TOP | Graphics.LEFT);

		this.g.setFont(Font.getFont(Font.FACE_SYSTEM, Font.STYLE_BOLD,
				Font.SIZE_MEDIUM));
		g.drawString(sz_CurrentInfo[ui_CurrentMessageIndex], m_centerHorizontal,
				m_screenHeight - 42, Graphics.BOTTOM | Graphics.HCENTER);

		// draws the map and the entities
		layMan_Game.paint(g, m_cornerX, m_cornerY);

		// draws the graphics elements of the UI
		layMan_UI.paint(g, 0, 0);

		super.flushGraphics();
	}

	/**
	 * Polls key states and drives the control-bar selection with a debounce.
	 *
	 * NOTE(review): g_GameState is a LOCAL initialized to 0 every call, so the
	 * switch cases 6 (spawn wave) and 7 (turret placement) are unreachable —
	 * it presumably should be a field updated by the commented-out transitions.
	 * Kept as-is to preserve current behavior; TODO when those states are
	 * brought back to life.
	 */
	private void getInput() {
		int keyStates = super.getKeyStates();
		int g_GameState = 0;

		switch(g_GameState){
		// base state for a running game, listening for main controls
		case 0:
			if(System.currentTimeMillis() - m_timeButtonLastPressed > 166) {
				if (((keyStates & LEFT_PRESSED) != 0) && (ui_ControlSelectedIndex > 0)) {
					m_timeButtonLastPressed = System.currentTimeMillis();
					spr_UI_controls[ui_ControlSelectedIndex].prevFrame();
					ui_ControlSelectedIndex--;
					ui_CurrentMessageIndex = ui_ControlSelectedIndex;
					isUIControlsMoving = true;
					isUIControlsMovingToLeft = false;
					spr_UI_controls[ui_ControlSelectedIndex].nextFrame();
				}
				else if (((keyStates & RIGHT_PRESSED) != 0) && (ui_ControlSelectedIndex < 2)) {
					m_timeButtonLastPressed = System.currentTimeMillis();
					spr_UI_controls[ui_ControlSelectedIndex].prevFrame();
					ui_ControlSelectedIndex++;
					ui_CurrentMessageIndex = ui_ControlSelectedIndex;
					isUIControlsMoving = true;
					isUIControlsMovingToLeft = true;
					spr_UI_controls[ui_ControlSelectedIndex].nextFrame();
				}
				else if ((keyStates & FIRE_PRESSED) != 0) {
					m_timeButtonLastPressed = System.currentTimeMillis();
					isUIControlsMoving = false;
					isUIControlsMovingToLeft = false;
					switch(ui_ControlSelectedIndex){
//					case 0:
//						spr_Placeholder.setVisible(true);
//						g_GameState = 7;
//						ui_CurrentMessageIndex = 3;
//						break;
//					case 1:
//						if(!isWave)
//							g_GameState = 6;
//						break;
//					case 2:
//						g_GameState = 1;
//						reset();
//						isMenu = true;
//						isSplash = false;
//						isGameover = false;
//						isTheatre = false;
//						break;
					}
				}
			}
			break;

		// spawning minions in
		case 6:
//			currWaveNumber++;
//			currWaveMinionHealth = (currWaveNumber * 15);
//			spawn(currWaveMinionHealth, 20);
//			isWave = true;
//			isThereAnyMinionAlive = true;
//			timeWaveStart = System.currentTimeMillis();
//			g_GameState = 0;
			break;

		// new turret building game state
		case 7:
			if(System.currentTimeMillis() - m_timeButtonLastPressed > 66) {
//				if (((keyStates & LEFT_PRESSED) != 0) && (spr_Placeholder.getX() > (0))) {
//					spr_Placeholder.setPosition(spr_Placeholder.getX() - 5, spr_Placeholder.getY());
//					m_timeButtonLastPressed = System.currentTimeMillis();
//				}
//				else if (((keyStates & RIGHT_PRESSED) != 0) && (spr_Placeholder.getX() < (160 - 10))) {
//					spr_Placeholder.setPosition(spr_Placeholder.getX() + 5, spr_Placeholder.getY());
//					m_timeButtonLastPressed = System.currentTimeMillis();
//				}
//				else if (((keyStates & UP_PRESSED) != 0) && (spr_Placeholder.getY() > (0))) {
//					spr_Placeholder.setPosition(spr_Placeholder.getX(), spr_Placeholder.getY() - 5);
//					m_timeButtonLastPressed = System.currentTimeMillis();
//				}
//				else if (((keyStates & DOWN_PRESSED) != 0) && (spr_Placeholder.getY() < (170 - 10))) {
//					spr_Placeholder.setPosition(spr_Placeholder.getX(), spr_Placeholder.getY() + 5);
//					m_timeButtonLastPressed = System.currentTimeMillis();
//				}
//				else if ((keyStates & FIRE_PRESSED) != 0 && (System.currentTimeMillis() - timeButtonLastPressed > 333)){
//					build(spr_Placeholder.getX(), spr_Placeholder.getY(), 8, 20);
//					m_timeButtonLastPressed = System.currentTimeMillis();
//					g_GameState = 0;
//					spr_Placeholder.setVisible(false);
//				}
			}
			break;
		}
	}
}
|
src/hu/laszlolukacs/spacesiegebreakers/scenes/GameScene.java
|
Added an initial version of the game scene
|
src/hu/laszlolukacs/spacesiegebreakers/scenes/GameScene.java
|
Added an initial version of the game scene
|
|
Java
|
mit
|
error: pathspec 'src/test/integration/java/com/aif/common/ICorporaParser.java' did not match any file(s) known to git
|
2ad44a8a1196d7001debfe3f3b3a255ad9e9477f
| 1
|
b0noI/AIF2,b0noI/AIF2,shepeliev/AIF2,Hronom/AIF2,shepeliev/AIF2,Hronom/AIF2
|
package com.aif.common;

import java.io.InputStream;

/**
 * Parses a corpora document into plain text.
 */
public interface ICorporaParser {

    /**
     * Converts the given XML input stream to its plain-text content.
     * NOTE(review): ownership of the stream (who closes it) is not specified
     * here — confirm with implementations.
     */
    public String toPlainText(InputStream inputXML);
}
|
src/test/integration/java/com/aif/common/ICorporaParser.java
|
add interface for corpora parser
|
src/test/integration/java/com/aif/common/ICorporaParser.java
|
add interface for corpora parser
|
|
Java
|
mit
|
error: pathspec 'rhogen-wizard/src/rhogenwizard/wizards/rhohub/LinkWizard.java' did not match any file(s) known to git
|
0e9cda714160d4dc2dbf207c6b2b93b14838f8b9
| 1
|
rhomobile/rhostudio,rhomobile/rhostudio
|
package rhogenwizard.wizards.rhohub;
import java.lang.reflect.InvocationTargetException;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.preference.IPreferenceStore;
import rhogenwizard.Activator;
import rhogenwizard.ShowPerspectiveJob;
import rhogenwizard.constants.CommonConstants;
import rhogenwizard.constants.UiConstants;
import rhogenwizard.project.extension.ProjectNotFoundException;
import rhogenwizard.wizards.BaseAppWizard;
/**
 * Wizard that links a project with a RhoHub build: collects credentials and
 * build settings, then (on finish) runs the link work in a progress dialog
 * and switches to the Rhodes perspective.
 */
public class LinkWizard extends BaseAppWizard
{
    private BuildCredentialPage m_pageCred    = null;
    private BuildSettingPage    m_pageSetting = null;

    // Project this wizard was opened for; passed to both pages.
    private IProject m_selectedProject = null;

    public LinkWizard(IProject project)
    {
        super();
        setNeedsProgressMonitor(true);

        m_selectedProject = project;
    }

    /**
     * Adding the page to the wizard.
     */
    public void addPages()
    {
        m_pageCred    = new BuildCredentialPage(m_selectedProject);
        m_pageSetting = new BuildSettingPage(m_selectedProject);

        addPage(m_pageCred);
        addPage(m_pageSetting);
    }

    // Runs doFinish() in the wizard container's progress dialog (forked,
    // cancelable). Returns false on cancel/error so the wizard stays open.
    public boolean performFinish()
    {
        IRunnableWithProgress op = new IRunnableWithProgress()
        {
            public void run(IProgressMonitor monitor) throws InvocationTargetException
            {
                try
                {
                    doFinish(monitor);
                }
                catch (CoreException e)
                {
                    throw new InvocationTargetException(e);
                }
                catch (ProjectNotFoundException e)
                {
                    e.printStackTrace();
                }
                finally
                {
                    monitor.done();
                }
            }
        };

        try
        {
            getContainer().run(true, true, op);
        }
        catch (InterruptedException e)
        {
            // user canceled the progress dialog
            return false;
        }
        catch (InvocationTargetException e)
        {
            // unwrap and show the real failure reported by doFinish()
            Throwable realException = e.getTargetException();
            MessageDialog.openError(getShell(), "Error", realException.getMessage());
            return false;
        }

        return true;
    }

    /**
     * @throws ProjectNotFoundException
     * The worker method. It will find the container, create the
     * file if missing or just replace its contents, and open the
     * editor on the newly created file.
     */
    private void doFinish(IProgressMonitor monitor) throws CoreException, ProjectNotFoundException
    {
        try
        {
            monitor.beginTask("Start building on rhohub server", 1);

            // Version check is currently disabled scaffolding, guarded by a flag.
            if (CommonConstants.checkRhohubVersion)
            {
    //            monitor.setTaskName("Check Rhodes version...");
    //
    //            try
    //            {
    //                if (!RunExeHelper.checkRhodesVersion(CommonConstants.rhodesVersion))
    //                {
    //                    throw new IOException();
    //                }
    //            }
    //            catch (IOException e)
    //            {
    //                String msg = "Installed Rhohub have old version, need rhodes version equal or greater "
    //                    + CommonConstants.rhodesVersion + " Please reinstall it (See 'http://docs.rhomobile.com/rhodes/install' for more information)";
    //                DialogUtils.error("Error", msg);
    //                return;
    //            }
            }

            monitor.worked(1);

            // NOTE(review): the store is fetched but nothing is persisted yet —
            // presumably the link settings were meant to be written here.
            IPreferenceStore store = Activator.getDefault().getPreferenceStore();

            if (store != null)
            {
            }

            // Switch the workbench to the Rhodes perspective asynchronously.
            ShowPerspectiveJob job = new ShowPerspectiveJob("show rhodes perspective",
                    UiConstants.rhodesPerspectiveId);
            job.schedule();

            monitor.worked(1);
        }
        catch (Exception e)
        {
            e.printStackTrace();
        }
    }
}
|
rhogen-wizard/src/rhogenwizard/wizards/rhohub/LinkWizard.java
|
add link wizard
|
rhogen-wizard/src/rhogenwizard/wizards/rhohub/LinkWizard.java
|
add link wizard
|
|
Java
|
epl-1.0
|
63c7dd92879fdbd3a61a51b17eeb63de536acf67
| 0
|
AvinashPD/mondrian,preisanalytics/mondrian,AvinashPD/mondrian,nextelBIS/mondrian,AvinashPD/mondrian,cesarmarinhorj/mondrian,ivanpogodin/mondrian,lgrill-pentaho/mondrian,OSBI/mondrian,mdamour1976/mondrian,lgrill-pentaho/mondrian,nextelBIS/mondrian,truvenganong/mondrian,syncron/mondrian,pentaho/mondrian,julianhyde/mondrian,dkincade/mondrian,Seiferxx/mondrian,syncron/mondrian,lgrill-pentaho/mondrian,wetet2/mondrian,cocosli/mondrian,openedbox/mondrian,dkincade/mondrian,truvenganong/mondrian,nextelBIS/mondrian,stiberger/mondrian,wetet2/mondrian,syncron/mondrian,sayanh/mondrian,mustangore/mondrian,cocosli/mondrian,preisanalytics/mondrian,stiberger/mondrian,ivanpogodin/mondrian,pentaho/mondrian,dkincade/mondrian,mustangore/mondrian,cesarmarinhorj/mondrian,openedbox/mondrian,openedbox/mondrian,cesarmarinhorj/mondrian,ivanpogodin/mondrian,wetet2/mondrian,mdamour1976/mondrian,sayanh/mondrian,cocosli/mondrian,stiberger/mondrian,pedrofvteixeira/mondrian,bmorrise/mondrian,pedrofvteixeira/mondrian,OSBI/mondrian,pentaho/mondrian,bmorrise/mondrian,mdamour1976/mondrian,julianhyde/mondrian,truvenganong/mondrian,Seiferxx/mondrian,preisanalytics/mondrian,OSBI/mondrian,sayanh/mondrian,Seiferxx/mondrian,pedrofvteixeira/mondrian,mustangore/mondrian,bmorrise/mondrian
|
/*
// This software is subject to the terms of the Eclipse Public License v1.0
// Agreement, available at the following URL:
// http://www.eclipse.org/legal/epl-v10.html.
// You must accept the terms of that agreement to use this software.
//
// Copyright (C) 2003-2005 Julian Hyde
// Copyright (C) 2005-2012 Pentaho and others
// All Rights Reserved.
*/
package mondrian.olap.fun;
import mondrian.olap.*;
import mondrian.resource.MondrianResource;
import mondrian.test.FoodMartTestCase;
import mondrian.test.TestContext;
import mondrian.udf.*;
import mondrian.util.Bug;
import junit.framework.Assert;
import junit.framework.ComparisonFailure;
import org.apache.log4j.Logger;
import org.eigenbase.xom.StringEscaper;
import java.io.*;
import java.util.*;
/**
* <code>FunctionTest</code> tests the functions defined in
* {@link BuiltinFunTable}.
*
* @author gjohnson
*/
public class FunctionTest extends FoodMartTestCase {
private static final Logger LOGGER = Logger.getLogger(FunctionTest.class);
// Expected result: all 12 month members of [Time].[1997] in calendar order.
private static final String months =
"[Time].[1997].[Q1].[1]\n"
+ "[Time].[1997].[Q1].[2]\n"
+ "[Time].[1997].[Q1].[3]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]\n"
+ "[Time].[1997].[Q3].[7]\n"
+ "[Time].[1997].[Q3].[8]\n"
+ "[Time].[1997].[Q3].[9]\n"
+ "[Time].[1997].[Q4].[10]\n"
+ "[Time].[1997].[Q4].[11]\n"
+ "[Time].[1997].[Q4].[12]";
// Expected result: the four quarter members of [Time].[1997].
private static final String quarters =
"[Time].[1997].[Q1]\n"
+ "[Time].[1997].[Q2]\n"
+ "[Time].[1997].[Q3]\n"
+ "[Time].[1997].[Q4]";
// Unique name of the 1997 year member.
private static final String year1997 = "[Time].[1997]";
// Expected result: year, quarters and months of 1997 in hierarchical
// (depth-first) order, as produced by Hierarchize.
private static final String hierarchized1997 =
year1997
+ "\n"
+ "[Time].[1997].[Q1]\n"
+ "[Time].[1997].[Q1].[1]\n"
+ "[Time].[1997].[Q1].[2]\n"
+ "[Time].[1997].[Q1].[3]\n"
+ "[Time].[1997].[Q2]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]\n"
+ "[Time].[1997].[Q3]\n"
+ "[Time].[1997].[Q3].[7]\n"
+ "[Time].[1997].[Q3].[8]\n"
+ "[Time].[1997].[Q3].[9]\n"
+ "[Time].[1997].[Q4]\n"
+ "[Time].[1997].[Q4].[10]\n"
+ "[Time].[1997].[Q4].[11]\n"
+ "[Time].[1997].[Q4].[12]";
// A tuple expression whose cell is known to be empty (no fact data at this
// customer/product crossing); used to exercise NULL/EMPTY numeric handling.
private static final String NullNumericExpr =
" ([Measures].[Unit Sales],"
+ " [Customers].[All Customers].[USA].[CA].[Bellflower], "
+ " [Product].[All Products].[Drink].[Alcoholic Beverages]."
+ "[Beer and Wine].[Beer].[Good].[Good Imported Beer])";
// Unique name of the weekly Time hierarchy; its spelling depends on the
// SsasCompatibleNaming property.
private static final String TimeWeekly =
MondrianProperties.instance().SsasCompatibleNaming.get()
? "[Time].[Weekly]"
: "[Time.Weekly]";
// ~ Constructors ----------------------------------------------------------
/**
 * Creates a FunctionTest.
 */
public FunctionTest() {
// No state to initialize; tests run against the shared FoodMart context
// provided by the FoodMartTestCase base class.
}
/**
 * Creates a FunctionTest with an explicit name.
 *
 * @param s Test name
 */
public FunctionTest(String s) {
super(s);
}
// ~ Methods ---------------------------------------------------------------
// ~ Test methods ----------------------------------------------------------
/**
 * Tests that Integer.MIN_VALUE (-2147483648) as the ParallelPeriod index
 * does not cause an NPE.
 */
public void testParallelPeriodMinValue() {
// Only checks that the query executes without throwing.
executeQuery(
"with "
+ "member [measures].[foo] as "
+ "'([Measures].[unit sales],"
+ "ParallelPeriod([Time].[Quarter], -2147483648))' "
+ "select "
+ "[measures].[foo] on columns, "
+ "[time].[1997].children on rows "
+ "from [sales]");
}
/**
 * Tests that Integer.MIN_VALUE (-2147483648) in Lag is handled correctly.
 */
public void testLagMinValue() {
// Only checks that the query executes without throwing.
executeQuery(
"with "
+ "member [measures].[foo] as "
+ "'([Measures].[unit sales], [Time].[1997].[Q1].Lag(-2147483648))' "
+ "select "
+ "[measures].[foo] on columns, "
+ "[time].[1997].children on rows "
+ "from [sales]");
}
/**
 * Tests that ParallelPeriod works when combined with an Aggregate-based
 * slicer member (a calculated filter member in the WHERE clause).
 */
public void testParallelPeriodWithSlicer() {
assertQueryReturns(
"With "
+ "Set [*NATIVE_CJ_SET] as 'NonEmptyCrossJoin([*BASE_MEMBERS_Time],[*BASE_MEMBERS_Product])' "
+ "Set [*BASE_MEMBERS_Measures] as '{[Measures].[*FORMATTED_MEASURE_0], [Measures].[*FORMATTED_MEASURE_1]}' "
+ "Set [*BASE_MEMBERS_Time] as '{[Time].[1997].[Q2].[6]}' "
+ "Set [*NATIVE_MEMBERS_Time] as 'Generate([*NATIVE_CJ_SET], {[Time].[Time].CurrentMember})' "
+ "Set [*BASE_MEMBERS_Product] as '{[Product].[All Products].[Drink],[Product].[All Products].[Food]}' "
+ "Set [*NATIVE_MEMBERS_Product] as 'Generate([*NATIVE_CJ_SET], {[Product].CurrentMember})' "
+ "Member [Measures].[*FORMATTED_MEASURE_0] as '[Measures].[Customer Count]', FORMAT_STRING = '#,##0', SOLVE_ORDER=400 "
+ "Member [Measures].[*FORMATTED_MEASURE_1] as "
+ "'([Measures].[Customer Count], ParallelPeriod([Time].[Quarter], 1, [Time].[Time].currentMember))', FORMAT_STRING = '#,##0', SOLVE_ORDER=-200 "
+ "Member [Product].[*FILTER_MEMBER] as 'Aggregate ([*NATIVE_MEMBERS_Product])', SOLVE_ORDER=-300 "
+ "Select "
+ "[*BASE_MEMBERS_Measures] on columns, Non Empty Generate([*NATIVE_CJ_SET], {([Time].[Time].CurrentMember)}) on rows "
+ "From [Sales] "
+ "Where ([Product].[*FILTER_MEMBER])",
"Axis #0:\n"
+ "{[Product].[*FILTER_MEMBER]}\n"
+ "Axis #1:\n"
+ "{[Measures].[*FORMATTED_MEASURE_0]}\n"
+ "{[Measures].[*FORMATTED_MEASURE_1]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q2].[6]}\n"
+ "Row #0: 1,314\n"
+ "Row #0: 1,447\n");
}
/**
 * Tests ParallelPeriod when its level argument is computed at runtime via
 * Levels(&lt;string&gt;); the previous month's sales appear next to the
 * current month's.
 */
public void testParallelperiodOnLevelsString() {
assertQueryReturns(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(Levels(\"[Time].[Month]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Axis #0:\n"
+ "{[Time].[1997].[Q2].[5]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Prev Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Gender].[All Gender]}\n"
+ "{[Gender].[F]}\n"
+ "{[Gender].[M]}\n"
+ "Row #0: 21,081\n"
+ "Row #0: 20,179\n"
+ "Row #1: 10,536\n"
+ "Row #1: 9,990\n"
+ "Row #2: 10,545\n"
+ "Row #2: 10,189\n");
}
/**
 * Tests ParallelPeriod applied to a member obtained via StrToMember, and
 * that StrToMember given a level name (not a member) fails with a clear
 * error message.
 */
public void testParallelperiodOnStrToMember() {
assertQueryReturns(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(strToMember(\"[Time].[1997].[Q2]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Axis #0:\n"
+ "{[Time].[1997].[Q2].[5]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Prev Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Gender].[All Gender]}\n"
+ "{[Gender].[F]}\n"
+ "{[Gender].[M]}\n"
+ "Row #0: 21,081\n"
+ "Row #0: 20,957\n"
+ "Row #1: 10,536\n"
+ "Row #1: 10,266\n"
+ "Row #2: 10,545\n"
+ "Row #2: 10,691\n");
// "[Time].[Quarter]" names a level, not a member, so StrToMember fails.
assertQueryThrows(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(strToMember(\"[Time].[Quarter]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Cannot find MDX member '[Time].[Quarter]'. Make sure it is indeed a member and not a level or a hierarchy.");
}
/**
 * Tests evaluation of numeric literals, and that a literal depends on no
 * hierarchies.
 */
public void testNumericLiteral() {
assertExprReturns("2", "2");
if (false) {
// The test is currently broken because the value 2.5 is formatted
// as "2". TODO: better default format string
assertExprReturns("2.5", "2.5");
}
assertExprReturns("-10.0", "-10");
getTestContext().assertExprDependsOn("1.5", "{}");
}
/**
 * Tests evaluation of string literals, and that a literal depends on no
 * hierarchies.
 */
public void testStringLiteral() {
// single-quoted string
if (false) {
// TODO: enhance parser so that you can include a quoted string
// inside a WITH MEMBER clause
assertExprReturns("'foobar'", "foobar");
}
// double-quoted string
assertExprReturns("\"foobar\"", "foobar");
// literals don't depend on any dimensions
getTestContext().assertExprDependsOn("\"foobar\"", "{}");
}
/** Tests the &lt;Hierarchy&gt;.Dimension property applied to a hierarchy. */
public void testDimensionHierarchy() {
assertExprReturns("[Time].Dimension.Name", "Time");
}
/** Tests the &lt;Level&gt;.Dimension property. */
public void testLevelDimension() {
assertExprReturns("[Time].[Year].Dimension.UniqueName", "[Time]");
}
/** Tests the &lt;Member&gt;.Dimension property. */
public void testMemberDimension() {
assertExprReturns("[Time].[1997].[Q2].Dimension.UniqueName", "[Time]");
}
/**
 * Tests Dimensions(&lt;numeric&gt;): index-based dimension lookup, its
 * dependency set, and out-of-bounds error handling.
 */
public void testDimensionsNumeric() {
// The Name of a fixed-index dimension depends on nothing ...
getTestContext().assertExprDependsOn("Dimensions(2).Name", "{}");
// ... but CurrentMember on the result depends on every hierarchy.
getTestContext().assertMemberExprDependsOn(
"Dimensions(3).CurrentMember",
TestContext.allHiers());
assertExprReturns("Dimensions(2).Name", "Store Size in SQFT");
// bug 1426134 -- Dimensions(0) throws 'Index '0' out of bounds'
assertExprReturns("Dimensions(0).Name", "Measures");
assertExprThrows("Dimensions(-1).Name", "Index '-1' out of bounds");
assertExprThrows("Dimensions(100).Name", "Index '100' out of bounds");
// Since Dimensions returns a Hierarchy, can apply CurrentMember.
assertAxisReturns(
"Dimensions(3).CurrentMember",
"[Store Type].[All Store Types]");
}
/**
 * Tests Dimensions(&lt;string&gt;): name-based dimension lookup and its
 * dependency set.
 */
public void testDimensionsString() {
getTestContext().assertExprDependsOn(
"Dimensions(\"foo\").UniqueName",
"{}");
getTestContext().assertMemberExprDependsOn(
"Dimensions(\"foo\").CurrentMember", TestContext.allHiers());
assertExprReturns("Dimensions(\"Store\").UniqueName", "[Store]");
// Since Dimensions returns a Hierarchy, can apply Children.
assertAxisReturns(
"Dimensions(\"Store\").Children",
"[Store].[Canada]\n"
+ "[Store].[Mexico]\n"
+ "[Store].[USA]");
}
/**
 * Tests that a set expression built from Dimensions(...).CurrentMember
 * reports a dependency on all hierarchies.
 */
public void testDimensionsDepends() {
final String expression =
"Crossjoin("
+ "{Dimensions(\"Measures\").CurrentMember.Hierarchy.CurrentMember}, "
+ "{Dimensions(\"Product\")})";
assertAxisReturns(
expression, "{[Measures].[Unit Sales], [Product].[All Products]}");
getTestContext().assertSetExprDependsOn(
expression, TestContext.allHiers());
}
/** Tests the &lt;Member&gt;.Hierarchy property on a Time member. */
public void testTime() {
assertExprReturns(
"[Time].[1997].[Q1].[1].Hierarchy.UniqueName", "[Time]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on a Gender member. */
public void testBasic9() {
assertExprReturns(
"[Gender].[All Gender].[F].Hierarchy.UniqueName", "[Gender]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on an Education Level member. */
public void testFirstInLevel9() {
assertExprReturns(
"[Education Level].[All Education Levels].[Bachelors Degree].Hierarchy.UniqueName",
"[Education Level]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on an 'All' member. */
public void testHierarchyAll() {
assertExprReturns(
"[Gender].[All Gender].Hierarchy.UniqueName", "[Gender]");
}
/**
 * Tests properties applied to the null member (the Parent of an 'All'
 * member). Mondrian returns sensible values where MSAS would fail.
 */
public void testNullMember() {
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Level.UniqueName",
"[Gender].[(All)]");
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Hierarchy.UniqueName", "[Gender]");
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Dimension.UniqueName", "[Gender]");
// MSAS succeeds too
assertExprReturns(
"[Gender].[All Gender].Parent.Children.Count", "0");
if (isDefaultNullMemberRepresentation()) {
// MSAS returns "" here.
assertExprReturns(
"[Gender].[All Gender].Parent.UniqueName", "[Gender].[#null]");
// MSAS returns "" here.
assertExprReturns(
"[Gender].[All Gender].Parent.Name", "#null");
}
}
/**
 * Tests use of NULL literal to generate a null cell value.
 * Testcase is from bug 1440344.
 *
 * <p>IIF returns NULL for departments whose sales are not above 10000;
 * those rows render as empty cells in the expected result.
 */
public void testNullValue() {
assertQueryReturns(
"with member [Measures].[X] as 'IIF([Measures].[Store Sales]>10000,[Measures].[Store Sales],Null)'\n"
+ "select\n"
+ "{[Measures].[X]} on columns,\n"
+ "{[Product].[Product Department].members} on rows\n"
+ "from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[X]}\n"
+ "Axis #2:\n"
+ "{[Product].[Drink].[Alcoholic Beverages]}\n"
+ "{[Product].[Drink].[Beverages]}\n"
+ "{[Product].[Drink].[Dairy]}\n"
+ "{[Product].[Food].[Baked Goods]}\n"
+ "{[Product].[Food].[Baking Goods]}\n"
+ "{[Product].[Food].[Breakfast Foods]}\n"
+ "{[Product].[Food].[Canned Foods]}\n"
+ "{[Product].[Food].[Canned Products]}\n"
+ "{[Product].[Food].[Dairy]}\n"
+ "{[Product].[Food].[Deli]}\n"
+ "{[Product].[Food].[Eggs]}\n"
+ "{[Product].[Food].[Frozen Foods]}\n"
+ "{[Product].[Food].[Meat]}\n"
+ "{[Product].[Food].[Produce]}\n"
+ "{[Product].[Food].[Seafood]}\n"
+ "{[Product].[Food].[Snack Foods]}\n"
+ "{[Product].[Food].[Snacks]}\n"
+ "{[Product].[Food].[Starchy Foods]}\n"
+ "{[Product].[Non-Consumable].[Carousel]}\n"
+ "{[Product].[Non-Consumable].[Checkout]}\n"
+ "{[Product].[Non-Consumable].[Health and Hygiene]}\n"
+ "{[Product].[Non-Consumable].[Household]}\n"
+ "{[Product].[Non-Consumable].[Periodicals]}\n"
+ "Row #0: 14,029.08\n"
+ "Row #1: 27,748.53\n"
+ "Row #2: \n"
+ "Row #3: 16,455.43\n"
+ "Row #4: 38,670.41\n"
+ "Row #5: \n"
+ "Row #6: 39,774.34\n"
+ "Row #7: \n"
+ "Row #8: 30,508.85\n"
+ "Row #9: 25,318.93\n"
+ "Row #10: \n"
+ "Row #11: 55,207.50\n"
+ "Row #12: \n"
+ "Row #13: 82,248.42\n"
+ "Row #14: \n"
+ "Row #15: 67,609.82\n"
+ "Row #16: 14,550.05\n"
+ "Row #17: 11,756.07\n"
+ "Row #18: \n"
+ "Row #19: \n"
+ "Row #20: 32,571.86\n"
+ "Row #21: 60,469.89\n"
+ "Row #22: \n");
}
/** Tests that multiplication involving NULL yields NULL (an empty cell). */
public void testNullInMultiplication() {
assertExprReturns("NULL*1", "");
assertExprReturns("1*NULL", "");
assertExprReturns("NULL*NULL", "");
}
/** Tests that in addition NULL is treated as zero. */
public void testNullInAddition() {
assertExprReturns("1+NULL", "1");
assertExprReturns("NULL+1", "1");
}
/** Tests that in subtraction NULL is treated as zero. */
public void testNullInSubtraction() {
assertExprReturns("1-NULL", "1");
assertExprReturns("NULL-1", "-1");
}
/** Tests the &lt;Member&gt;.Level property. */
public void testMemberLevel() {
assertExprReturns(
"[Time].[1997].[Q1].[1].Level.UniqueName",
"[Time].[Month]");
}
/** Tests &lt;Hierarchy&gt;.Levels(&lt;numeric&gt;): index-based level lookup. */
public void testLevelsNumeric() {
assertExprReturns("[Time].[Time].Levels(2).Name", "Month")
assertExprReturns("[Time].[Time].Levels(0).Name", "Year");
assertExprReturns("[Product].Levels(0).Name", "(All)");
}
/** Tests that Levels with a negative index reports an out-of-bounds error. */
public void testLevelsTooSmall() {
assertExprThrows(
"[Time].[Time].Levels(-1).Name", "Index '-1' out of bounds");
}
/** Tests that Levels with an index past the last level reports an error. */
public void testLevelsTooLarge() {
assertExprThrows(
"[Time].[Time].Levels(8).Name", "Index '8' out of bounds");
}
/** Tests &lt;Hierarchy&gt;.Levels(&lt;string&gt;): name-based level lookup. */
public void testHierarchyLevelsString() {
assertExprReturns(
"[Time].[Time].Levels(\"Year\").UniqueName", "[Time].[Year]");
}
/** Tests the error when &lt;Hierarchy&gt;.Levels names an unknown level. */
public void testHierarchyLevelsStringFail() {
assertExprThrows(
"[Time].[Time].Levels(\"nonexistent\").UniqueName",
"Level 'nonexistent' not found in hierarchy '[Time]'");
}
/** Tests the standalone Levels(&lt;string&gt;) function. */
public void testLevelsString() {
assertExprReturns(
"Levels(\"[Time].[Year]\").UniqueName",
"[Time].[Year]");
}
/** Tests the error when standalone Levels names an unknown level. */
public void testLevelsStringFail() {
assertExprThrows(
"Levels(\"nonexistent\").UniqueName",
"Level 'nonexistent' not found");
}
/**
 * Tests IsEmpty and the equivalent 'IS EMPTY' operator inside queries:
 * empty cells are substituted via Iif, and a calculated member that is
 * never empty (a CAST literal) is detected as such.
 */
public void testIsEmptyQuery() {
String desiredResult =
"Axis #0:\n"
+ "{[Time].[1997].[Q4].[12], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer], [Measures].[Foo]}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "{[Store].[USA].[WA].[Bremerton]}\n"
+ "{[Store].[USA].[WA].[Seattle]}\n"
+ "{[Store].[USA].[WA].[Spokane]}\n"
+ "{[Store].[USA].[WA].[Tacoma]}\n"
+ "{[Store].[USA].[WA].[Walla Walla]}\n"
+ "{[Store].[USA].[WA].[Yakima]}\n"
+ "Row #0: 5\n"
+ "Row #0: 5\n"
+ "Row #0: 2\n"
+ "Row #0: 5\n"
+ "Row #0: 11\n"
+ "Row #0: 5\n"
+ "Row #0: 4\n";
// Function form: IsEmpty(<expr>).
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif(IsEmpty([Measures].[Unit Sales]), 5, [Measures].[Unit Sales])'\n"
+ "SELECT {[Store].[USA].[WA].children} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1997].[Q4].[12],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Measures].[Foo])",
desiredResult);
// Operator form: <expr> IS EMPTY; must give the same result.
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif([Measures].[Unit Sales] IS EMPTY, 5, [Measures].[Unit Sales])'\n"
+ "SELECT {[Store].[USA].[WA].children} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1997].[Q4].[12],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Measures].[Foo])",
desiredResult);
// A CAST literal is never empty, even in a period with no sales.
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif([Measures].[Bar] IS EMPTY, 1, [Measures].[Bar])'\n"
+ "MEMBER [Measures].[Bar] AS 'CAST(\"42\" AS INTEGER)'\n"
+ "SELECT {[Measures].[Unit Sales], [Measures].[Foo]} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1998].[Q4].[12])",
"Axis #0:\n"
+ "{[Time].[1998].[Q4].[12]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: \n"
+ "Row #0: 42\n");
}
/** Tests IsEmpty applied to the result of an Aggregate expression. */
public void testIsEmptyWithAggregate() {
assertQueryReturns(
"WITH MEMBER [gender].[foo] AS 'isEmpty(Aggregate({[Gender].m}))' "
+ "SELECT {Gender.foo} on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Gender].[foo]}\n"
+ "Row #0: false\n");
}
/**
 * Tests the distinction between a NULL member and an EMPTY cell value.
 *
 * <p>Member-returning functions applied at the hierarchy root yield the
 * null member (checked with {@code IS NULL}) rather than throwing. The
 * remaining assertions — that a tuple containing a null member evaluates to
 * a NULL cell, and that a valid crossing with no fact data is EMPTY — are
 * disabled until {@link mondrian.util.Bug#BugMondrian207Fixed}.
 */
public void testIsEmpty()
{
    // The parent of the 'All' member is the null member.
    assertBooleanExprReturns("[Gender].[All Gender].Parent IS NULL", true);
    // Any functions that return a member from parameters that
    // include a member and that member is NULL also give a NULL.
    // Not a runtime exception.
    assertBooleanExprReturns(
        "[Gender].CurrentMember.Parent.NextMember IS NULL",
        true);
    if (!Bug.BugMondrian207Fixed) {
        // Everything below is pending bug MONDRIAN-207.
        return;
    }
    // When resolving a tuple's value in the cube, if there is
    // at least one NULL member in the tuple should return a
    // NULL cell value.
    assertBooleanExprReturns(
        "IsEmpty(([Time].currentMember.Parent, [Measures].[Unit Sales]))",
        false);
    assertBooleanExprReturns(
        "IsEmpty(([Time].currentMember, [Measures].[Unit Sales]))",
        false);
    // EMPTY refers to a genuine cell value that exists in the cube space,
    // and has no NULL members in the tuple,
    // but has no fact data at that crossing,
    // so it evaluates to EMPTY as a cell value.
    assertBooleanExprReturns(
        "IsEmpty(\n"
        + " ([Time].[1997].[Q4].[12],\n"
        + " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
        + " [Store].[All Stores].[USA].[WA].[Bellingham]))", true);
    assertBooleanExprReturns(
        "IsEmpty(\n"
        + " ([Time].[1997].[Q4].[11],\n"
        + " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
        + " [Store].[All Stores].[USA].[WA].[Bellingham]))", false);
    // The empty set is neither EMPTY nor NULL.
    // should give 0 as a result, not NULL and not EMPTY.
    assertQueryReturns(
        "WITH SET [empty set] AS '{}'\n"
        + " MEMBER [Measures].[Set Size] AS 'Count([empty set])'\n"
        + " MEMBER [Measures].[Set Size Is Empty] AS 'CASE WHEN IsEmpty([Measures].[Set Size]) THEN 1 ELSE 0 END '\n"
        + "SELECT [Measures].[Set Size] on columns", "");
    assertQueryReturns(
        "WITH SET [empty set] AS '{}'\n"
        + "WITH MEMBER [Measures].[Set Size] AS 'Count([empty set])'\n"
        + "SELECT [Measures].[Set Size] on columns", "");
    // Run time errors are BAD things. They should not occur
    // in almost all cases. In fact there should be no
    // logically formed MDX that generates them. An ERROR
    // value in a cell though is perfectly legal - e.g. a
    // divide by 0.
    //
    // Example (not executed): for the [Time].[All Time] row and the first
    // year/month, PrevMember evaluates to NULL, the tuple evaluates to
    // NULL, the division implicitly converts NULL to 0 and produces an
    // ERROR cell value due to divide by 0:
    //
    //   WITH [Measures].[Ratio This Period to Previous] as
    //   '([Measures].[Sales],[Time].CurrentMember/([Measures].[Sales],[Time].CurrentMember.PrevMember)'
    //   SELECT [Measures].[Ratio This Period to Previous] ON COLUMNS,
    //   [Time].Members ON ROWS
    //   FROM ...
    //
    // This leads to another point: NULL and EMPTY values get
    // implicitly converted to 0 when treated as numeric
    // values for division and multiplication but for addition
    // and subtraction, NULL is treated as NULL (5+NULL yields
    // NULL).
    // I have no idea about how EMPTY works. I.e. does
    // 5+EMPTY yield 5 or EMPTY or NULL or what?
    //
    // Example (not executed):
    //
    //   WITH MEMBER [Measures].[5 plus empty] AS
    //   '5+([Product].[All Products].[Ski boots],[Geography].[All Geography].[Hawaii])'
    //   SELECT [Measures].[5 plus empty] ON COLUMNS
    //   FROM ...
    //
    // Does this yield EMPTY, 5, NULL or ERROR?
    //
    // Lastly, IS NULL and IS EMPTY are both legal and
    // distinct. <<Object>> IS {<<Object>> | NULL} and
    // <<Value>> IS EMPTY.
    // E.g.
    // a) [Time].CurrentMember.Parent IS [Time].[Year].[2004]
    // is also a perfectly legal expression and better than
    // [Time].CurrentMember.Parent.Name="2004".
    // b) ([Measures].[Sales],[Time].FirstSibling) IS EMPTY is
    // a legal expression.
    // Microsoft's site says that the EMPTY value participates in 3 value
    // logic e.g. TRUE AND EMPTY gives EMPTY, FALSE AND EMPTY gives FALSE.
    // todo: test for this
}
/**
 * Baseline for the ValidMeasure tests: without ValidMeasure, a sales
 * measure sliced by the Warehouse dimension yields only empty cells.
 */
public void testQueryWithoutValidMeasure() {
assertQueryReturns(
"with\n"
+ "member measures.[without VM] as ' [measures].[unit sales] '\n"
+ "select {measures.[without VM] } on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[without VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: \n"
+ "Row #1: \n"
+ "Row #2: \n");
}
/**
 * Tests the <code>ValidMeasure</code> function: unlike the baseline in
 * {@code testQueryWithoutValidMeasure}, every Warehouse row now shows the
 * total unit sales value.
 */
public void testValidMeasure() {
assertQueryReturns(
"with\n"
+ "member measures.[with VM] as 'validmeasure([measures].[unit sales])'\n"
+ "select { measures.[with VM]} on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[with VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: 266,773\n"
+ "Row #2: 266,773\n");
}
/**
 * Tests that ValidMeasure does not defeat NON EMPTY elimination when
 * wrapped in an Iif that yields NULL for genuinely empty rows.
 *
 * <p>NOTE: the leading underscore keeps this test out of the reflective
 * JUnit run — it is currently disabled.
 */
public void _testValidMeasureNonEmpty() {
// Note that [with VM2] is NULL where it needs to be - and therefore
// does not prevent NON EMPTY from eliminating empty rows.
assertQueryReturns(
"with set [Foo] as ' Crossjoin({[Time].Children}, {[Measures].[Warehouse Sales]}) '\n"
+ " member [Measures].[with VM] as 'ValidMeasure([Measures].[Unit Sales])'\n"
+ " member [Measures].[with VM2] as 'Iif(Count(Filter([Foo], not isempty([Measures].CurrentMember))) > 0, ValidMeasure([Measures].[Unit Sales]), NULL)'\n"
+ "select NON EMPTY Crossjoin({[Time].Children}, {[Measures].[with VM2], [Measures].[Warehouse Sales]}) ON COLUMNS,\n"
+ " NON EMPTY {[Warehouse].[All Warehouses].[USA].[WA].Children} ON ROWS\n"
+ "from [Warehouse and Sales]\n"
+ "where [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]",
"Axis #0:\n"
+ "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "Axis #1:\n"
+ "{[Time].[1997].[Q1], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q1], [Measures].[Warehouse Sales]}\n"
+ "{[Time].[1997].[Q2], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q2], [Measures].[Warehouse Sales]}\n"
+ "{[Time].[1997].[Q3], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q4], [Measures].[with VM2]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[USA].[WA].[Seattle]}\n"
+ "{[Warehouse].[USA].[WA].[Tacoma]}\n"
+ "{[Warehouse].[USA].[WA].[Yakima]}\n"
+ "Row #0: 26\n"
+ "Row #0: 34.793\n"
+ "Row #0: 25\n"
+ "Row #0: \n"
+ "Row #0: 36\n"
+ "Row #0: 28\n"
+ "Row #1: 26\n"
+ "Row #1: \n"
+ "Row #1: 25\n"
+ "Row #1: 64.615\n"
+ "Row #1: 36\n"
+ "Row #1: 28\n"
+ "Row #2: 26\n"
+ "Row #2: 79.657\n"
+ "Row #2: 25\n"
+ "Row #2: \n"
+ "Row #2: 36\n"
+ "Row #2: 28\n");
}
/**
 * Tests ValidMeasure when its argument is a tuple containing a non-measure
 * member in addition to the measure.
 */
public void testValidMeasureTupleHasAnotherMember() {
assertQueryReturns(
"with\n"
+ "member measures.[with VM] as 'validmeasure(([measures].[unit sales],[customers].[all customers]))'\n"
+ "select { measures.[with VM]} on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[with VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: 266,773\n"
+ "Row #2: 266,773\n");
}
/**
 * Tests the hierarchy-dependency sets reported for ValidMeasure
 * expressions; members fixed inside the tuple are removed from the set.
 */
public void testValidMeasureDepends() {
String s12 = TestContext.allHiersExcept("[Measures]");
getTestContext().assertExprDependsOn(
"ValidMeasure([Measures].[Unit Sales])", s12);
// Fixing [Time] in the tuple removes it from the dependency set.
String s11 = TestContext.allHiersExcept("[Measures]", "[Time]");
getTestContext().assertExprDependsOn(
"ValidMeasure(([Measures].[Unit Sales], [Time].[1997].[Q1]))", s11);
// CurrentMember.Parent re-introduces a dependency on [Time].
String s1 = TestContext.allHiersExcept("[Measures]");
getTestContext().assertExprDependsOn(
"ValidMeasure(([Measures].[Unit Sales], "
+ "[Time].[Time].CurrentMember.Parent))",
s1);
}
/**
 * Tests the Ancestor function: walking up to a named level, and the error
 * raised when the level belongs to a different hierarchy.
 */
public void testAncestor() {
    // Ancestor of Los Angeles at the Store Country level is USA.
    final Member ancestor =
        executeSingletonAxis(
            "Ancestor([Store].[USA].[CA].[Los Angeles],[Store Country])");
    Assert.assertEquals("USA", ancestor.getName());
    // A level from an unrelated hierarchy is rejected.
    assertAxisThrows(
        "Ancestor([Store].[USA].[CA].[Los Angeles],[Promotions].[Promotion Name])",
        "Error while executing query");
}
/**
 * Tests Ancestor with a numeric distance argument, including distance 0
 * (the member itself), ragged-hierarchy members, and out-of-range
 * distances (which yield null).
 */
public void testAncestorNumeric() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA].[Los Angeles],1)");
Assert.assertEquals("CA", member.getName());
// Distance 0 is the member itself.
member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA].[Los Angeles], 0)");
Assert.assertEquals("Los Angeles", member.getName());
final TestContext testContextRagged =
getTestContext().withCube("[Sales Ragged]");
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], 1)");
Assert.assertEquals("All Stores", member.getName());
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[USA].[Washington], 1)");
Assert.assertEquals("USA", member.getName());
// complicated way to say "1".
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[USA].[Washington], 7 * 6 - 41)");
Assert.assertEquals("USA", member.getName());
// Distances past the root, or negative, yield null.
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], 2)");
Assert.assertNull("Ancestor at 2 must be null", member);
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], -5)");
Assert.assertNull("Ancestor at -5 must be null", member);
}
/** Tests Ancestor with a level below the member's own level: yields null. */
public void testAncestorHigher() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA],[Store].[Store City])");
Assert.assertNull(member); // MSOLAP returns null
}
/** Tests Ancestor at the member's own level: yields the member itself. */
public void testAncestorSameLevel() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[Canada],[Store].[Store Country])");
Assert.assertEquals("Canada", member.getName());
}
/** Tests that Ancestor rejects a level from a different hierarchy. */
public void testAncestorWrongHierarchy() {
// MSOLAP gives error "Formula error - dimensions are not
// valid (they do not match) - in the Ancestor function"
assertAxisThrows(
"Ancestor([Gender].[M],[Store].[Store Country])",
"Error while executing query");
}
/** Tests Ancestor up to the (All) level: yields the 'All' member. */
public void testAncestorAllLevel() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA],[Store].Levels(0))");
Assert.assertTrue(member.isAll());
}
/**
 * Tests Ancestor in a ragged hierarchy where an intermediate parent is
 * hidden: the visible country ancestor is still found.
 */
public void testAncestorWithHiddenParent() {
final TestContext testContext =
getTestContext().withCube("[Sales Ragged]");
Member member =
testContext.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Israel].[Haifa], "
+ "[Store].[Store Country])");
assertNotNull("Member must not be null.", member);
Assert.assertEquals("Israel", member.getName());
}
/**
 * Tests the dependency sets of Ancestor expressions: a dependency on
 * [Store] is reported exactly when CurrentMember appears in an argument.
 */
public void testAncestorDepends() {
getTestContext().assertExprDependsOn(
"Ancestor([Store].CurrentMember, [Store].[Store Country]).Name",
"{[Store]}");
getTestContext().assertExprDependsOn(
"Ancestor([Store].[All Stores].[USA], "
+ "[Store].CurrentMember.Level).Name",
"{[Store]}");
// Fully constant arguments depend on nothing.
getTestContext().assertExprDependsOn(
"Ancestor([Store].[All Stores].[USA], "
+ "[Store].[Store Country]).Name",
"{}");
getTestContext().assertExprDependsOn(
"Ancestor([Store].CurrentMember, 2+1).Name", "{[Store]}");
}
/**
 * Tests the Ancestors function with both a level argument and an integer
 * depth argument, on a parent-child hierarchy (HR Employees) and a
 * regular hierarchy (Store), plus Count over the result and the error for
 * an invalid depth argument.
 */
public void testAncestors() {
// Test that we can execute Ancestors by passing a level as
// the depth argument (PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], [Employees].[All Employees].Level)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply]}\n"
+ "{[Employees].[Sheri Nowmer]}\n"
+ "{[Employees].[All Employees]}\n"
+ "Row #0: $984.45\n"
+ "Row #0: $3,426.54\n"
+ "Row #0: $3,610.14\n"
+ "Row #0: $17,099.20\n"
+ "Row #0: $36,494.07\n"
+ "Row #0: $39,431.67\n"
+ "Row #0: $39,431.67\n");
// Test that we can execute Ancestors by passing a level as
// the depth argument (non PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Store].[USA].[CA].[Los Angeles], [Store].[Store Country])'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[CA]}\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 74,748\n"
+ "Row #0: 266,773\n");
// Test that we can execute Ancestors by passing an integer as
// the depth argument (PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], 3)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long]}\n"
+ "Row #0: $984.45\n"
+ "Row #0: $3,426.54\n"
+ "Row #0: $3,610.14\n");
// Test that we can execute Ancestors by passing an integer as
// the depth argument (non PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Store].[USA].[CA].[Los Angeles], 2)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[CA]}\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 74,748\n"
+ "Row #0: 266,773\n");
// Test that we can count the number of ancestors.
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], [Employees].[All Employees].Level)'\n"
+ "member [Measures].[Depth] as\n"
+ " 'Count([*ancestors])'\n"
+ "select\n"
+ " [Measures].[Depth] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Depth]}\n"
+ "Row #0: 7\n");
// test depth argument not a level
assertAxisThrows(
"Ancestors([Store].[USA].[CA].[Los Angeles],[Store])",
"Error while executing query");
}
/**
 * Tests the <code>ordinal</code> member property against the ragged
 * [Sales Ragged] cube, where some members skip intermediate levels.
 */
public void testOrdinal() {
    final TestContext raggedContext =
        getTestContext().withCube("Sales Ragged");
    // Vatican skips the state/city levels, so it sits at level 1.
    Cell vatican =
        raggedContext.executeExprRaw(
            "[Store].[All Stores].[Vatican].ordinal");
    assertEquals(
        "Vatican is at level 1.",
        1,
        ((Number) vatican.getValue()).intValue());
    // Washington is a regular member three levels down.
    Cell washington =
        raggedContext.executeExprRaw(
            "[Store].[All Stores].[USA].[Washington].ordinal");
    assertEquals(
        "Washington is at level 3.",
        3,
        ((Number) washington.getValue()).intValue());
}
/**
 * Tests ClosingPeriod with no arguments: defaults to the child level of
 * the current Time member.
 */
public void testClosingPeriodNoArgs() {
getTestContext().assertMemberExprDependsOn(
"ClosingPeriod()", "{[Time]}");
// MSOLAP returns [1997].[Q4], because [Time].CurrentMember =
// [1997].
Member member = executeSingletonAxis("ClosingPeriod()");
Assert.assertEquals("[Time].[1997].[Q4]", member.getUniqueName());
}
    /**
     * Tests {@code ClosingPeriod(<level>)}: the descendant of
     * [Time].CurrentMember that is the last sibling at the given level.
     */
    public void testClosingPeriodLevel() {
        getTestContext().assertMemberExprDependsOn(
            "ClosingPeriod([Time].[Year])", "{[Time]}");
        getTestContext().assertMemberExprDependsOn(
            "([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))",
            "{[Time]}");
        Member member;
        // Context is the default member [Time].[1997].
        member = executeSingletonAxis("ClosingPeriod([Year])");
        Assert.assertEquals("[Time].[1997]", member.getUniqueName());
        member = executeSingletonAxis("ClosingPeriod([Quarter])");
        Assert.assertEquals("[Time].[1997].[Q4]", member.getUniqueName());
        member = executeSingletonAxis("ClosingPeriod([Month])");
        Assert.assertEquals("[Time].[1997].[Q4].[12]", member.getUniqueName());
        // [Closing Unit Sales] of each period is the Unit Sales of that
        // period's last month.
        assertQueryReturns(
            "with member [Measures].[Closing Unit Sales] as "
            + "'([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))'\n"
            + "select non empty {[Measures].[Closing Unit Sales]} on columns,\n"
            + " {Descendants([Time].[1997])} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Closing Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2].[5]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "{[Time].[1997].[Q3].[9]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "{[Time].[1997].[Q4].[10]}\n"
            + "{[Time].[1997].[Q4].[11]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "Row #0: 26,796\n"
            + "Row #1: 23,706\n"
            + "Row #2: 21,628\n"
            + "Row #3: 20,957\n"
            + "Row #4: 23,706\n"
            + "Row #5: 21,350\n"
            + "Row #6: 20,179\n"
            + "Row #7: 21,081\n"
            + "Row #8: 21,350\n"
            + "Row #9: 20,388\n"
            + "Row #10: 23,763\n"
            + "Row #11: 21,697\n"
            + "Row #12: 20,388\n"
            + "Row #13: 26,796\n"
            + "Row #14: 19,958\n"
            + "Row #15: 25,270\n"
            + "Row #16: 26,796\n");
        // Leaf months are their own closing period, e.g. rows 2 and 3.
        assertQueryReturns(
            "with member [Measures].[Closing Unit Sales] as '([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Closing Unit Sales]} on columns,\n"
            + " {[Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q1].[1], [Time].[1997].[Q1].[3], [Time].[1997].[Q4].[12]} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Closing Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 26,796\n"
            + "Row #1: 66,291\n"
            + "Row #1: 23,706\n"
            + "Row #2: 21,628\n"
            + "Row #2: 21,628\n"
            + "Row #3: 23,706\n"
            + "Row #3: 23,706\n"
            + "Row #4: 26,796\n"
            + "Row #4: 26,796\n");
    }
    /**
     * ClosingPeriod must reject a level that belongs to a different
     * hierarchy than the (implicit [Time]) member argument.
     */
    public void testClosingPeriodLevelNotInTimeFails() {
        assertAxisThrows(
            "ClosingPeriod([Store].[Store City])",
            "The <level> and <member> arguments to ClosingPeriod must be from "
            + "the same hierarchy. The level was from '[Store]' but the member "
            + "was from '[Time]'");
    }
    /**
     * Disabled test for the (invalid) ClosingPeriod(&lt;member&gt;) form;
     * kept for documentation purposes only.
     */
    public void testClosingPeriodMember() {
        if (false) {
            // This test is mistaken. Valid forms are ClosingPeriod(<level>)
            // and ClosingPeriod(<level>, <member>), but not
            // ClosingPeriod(<member>)
            Member member = executeSingletonAxis("ClosingPeriod([USA])");
            Assert.assertEquals("WA", member.getName());
        }
    }
    /**
     * Tests ClosingPeriod() evaluated in the context of a leaf time member;
     * the result is the null member, rendered here as [Time].[#null].
     */
    public void testClosingPeriodMemberLeaf() {
        Member member;
        if (false) {
            // This test is mistaken. Valid forms are ClosingPeriod(<level>)
            // and ClosingPeriod(<level>, <member>), but not
            // ClosingPeriod(<member>)
            member = executeSingletonAxis(
                "ClosingPeriod([Time].[1997].[Q3].[8])");
            Assert.assertNull(member);
        } else if (isDefaultNullMemberRepresentation()) {
            assertQueryReturns(
                "with member [Measures].[Foo] as ClosingPeriod().uniquename\n"
                + "select {[Measures].[Foo]} on columns,\n"
                + " {[Time].[1997],\n"
                + " [Time].[1997].[Q2],\n"
                + " [Time].[1997].[Q2].[4]} on rows\n"
                + "from Sales",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Foo]}\n"
                + "Axis #2:\n"
                + "{[Time].[1997]}\n"
                + "{[Time].[1997].[Q2]}\n"
                + "{[Time].[1997].[Q2].[4]}\n"
                + "Row #0: [Time].[1997].[Q4]\n"
                + "Row #1: [Time].[1997].[Q2].[6]\n"
                + "Row #2: [Time].[#null]\n"
                // MSAS returns "" here.
                + "");
        }
    }
    /**
     * Tests the two-argument form {@code ClosingPeriod(<level>, <member>)}
     * across time and non-time hierarchies, including ragged hierarchies
     * and hierarchy-mismatch error handling.
     */
    public void testClosingPeriod() {
        getTestContext().assertMemberExprDependsOn(
            "ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)",
            "{[Time]}");
        String s1 = TestContext.allHiersExcept("[Measures]");
        getTestContext().assertExprDependsOn(
            "(([Measures].[Store Sales],"
            + " ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)) - "
            + "([Measures].[Store Cost],"
            + " ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)))",
            s1);
        getTestContext().assertMemberExprDependsOn(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3])", "{}");
        // Asking for an ancestor level yields the empty result.
        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997].[Q3])", "");
        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997].[Q3])",
            "[Time].[1997].[Q3]");
        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3])",
            "[Time].[1997].[Q3].[9]");
        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997])",
            "[Time].[1997].[Q4]");
        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997])", "[Time].[1997]");
        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997])",
            "[Time].[1997].[Q4].[12]");
        // leaf member
        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997].[Q3].[8])", "");
        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997].[Q3].[8])", "");
        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3].[8])",
            "[Time].[1997].[Q3].[8]");
        // non-Time dimension
        assertAxisReturns(
            "ClosingPeriod([Product].[Product Name], [Product].[All Products].[Drink])",
            "[Product].[Drink].[Dairy].[Dairy].[Milk].[Gorilla].[Gorilla Whole Milk]");
        assertAxisReturns(
            "ClosingPeriod([Product].[Product Family], [Product].[All Products].[Drink])",
            "[Product].[Drink]");
        // 'all' level
        assertAxisReturns(
            "ClosingPeriod([Product].[(All)], [Product].[All Products].[Drink])",
            "");
        // ragged
        getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
            "ClosingPeriod([Store].[Store City], [Store].[All Stores].[Israel])",
            "[Store].[Israel].[Israel].[Tel Aviv]");
        // Default member is [Time].[1997].
        assertAxisReturns(
            "ClosingPeriod([Time].[Month])", "[Time].[1997].[Q4].[12]");
        assertAxisReturns("ClosingPeriod()", "[Time].[1997].[Q4]");
        TestContext testContext = getTestContext().withCube("[Sales Ragged]");
        // In the ragged hierarchy, Israel skips the Store State level.
        testContext.assertAxisReturns(
            "ClosingPeriod([Store].[Store State], [Store].[All Stores].[Israel])",
            "");
        testContext.assertAxisThrows(
            "ClosingPeriod([Time].[Year], [Store].[All Stores].[Israel])",
            "The <level> and <member> arguments to ClosingPeriod must be "
            + "from the same hierarchy. The level was from '[Time]' but "
            + "the member was from '[Store]'.");
    }
public void testClosingPeriodBelow() {
Member member = executeSingletonAxis(
"ClosingPeriod([Quarter],[1997].[Q3].[8])");
Assert.assertNull(member);
}
public void testCousin1() {
Member member = executeSingletonAxis("Cousin([1997].[Q4],[1998])");
Assert.assertEquals("[Time].[1998].[Q4]", member.getUniqueName());
}
public void testCousin2() {
Member member = executeSingletonAxis(
"Cousin([1997].[Q4].[12],[1998].[Q1])");
Assert.assertEquals("[Time].[1998].[Q1].[3]", member.getUniqueName());
}
public void testCousinOverrun() {
Member member = executeSingletonAxis(
"Cousin([Customers].[USA].[CA].[San Jose],"
+ " [Customers].[USA].[OR])");
// CA has more cities than OR
Assert.assertNull(member);
}
public void testCousinThreeDown() {
Member member =
executeSingletonAxis(
"Cousin([Customers].[USA].[CA].[Berkeley].[Barbara Combs],"
+ " [Customers].[Mexico])");
// Barbara Combs is the 6th child
// of the 4th child (Berkeley)
// of the 1st child (CA)
// of USA
// Annmarie Hill is the 6th child
// of the 4th child (Tixapan)
// of the 1st child (DF)
// of Mexico
Assert.assertEquals(
"[Customers].[Mexico].[DF].[Tixapan].[Annmarie Hill]",
member.getUniqueName());
}
public void testCousinSameLevel() {
Member member =
executeSingletonAxis("Cousin([Gender].[M], [Gender].[F])");
Assert.assertEquals("F", member.getName());
}
public void testCousinHigherLevel() {
Member member =
executeSingletonAxis("Cousin([Time].[1997], [Time].[1998].[Q1])");
Assert.assertNull(member);
}
    /**
     * Tests that Cousin rejects arguments drawn from different hierarchies,
     * using the localized resource message for the expected error.
     */
    public void testCousinWrongHierarchy() {
        assertAxisThrows(
            "Cousin([Time].[1997], [Gender].[M])",
            MondrianResource.instance().CousinHierarchyMismatch.str(
                "[Time].[1997]",
                "[Gender].[M]"));
    }
    /** Tests the {@code <Member>.Parent} property, including null cases. */
    public void testParent() {
        getTestContext().assertMemberExprDependsOn(
            "[Gender].Parent",
            "{[Gender]}");
        getTestContext().assertMemberExprDependsOn("[Gender].[M].Parent", "{}");
        assertAxisReturns(
            "{[Store].[USA].[CA].Parent}", "[Store].[USA]");
        // root member has null parent
        assertAxisReturns("{[Store].[All Stores].Parent}", "");
        // parent of null member is null
        assertAxisReturns("{[Store].[All Stores].Parent.Parent}", "");
    }
    /**
     * Tests the Parent property on a parent-child hierarchy ([Employees]
     * in the HR cube), including access via Members/AllMembers.Item and
     * Ascendants.
     */
    public void testParentPC() {
        final TestContext testContext = getTestContext().withCube("HR");
        testContext.assertAxisReturns(
            "[Employees].Parent",
            "");
        testContext.assertAxisReturns(
            "[Employees].[Sheri Nowmer].Parent",
            "[Employees].[All Employees]");
        testContext.assertAxisReturns(
            "[Employees].[Sheri Nowmer].[Derrick Whelply].Parent",
            "[Employees].[Sheri Nowmer]");
        testContext.assertAxisReturns(
            "[Employees].Members.Item(3)",
            "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]");
        testContext.assertAxisReturns(
            "[Employees].Members.Item(3).Parent",
            "[Employees].[Sheri Nowmer].[Derrick Whelply]");
        testContext.assertAxisReturns(
            "[Employees].AllMembers.Item(3).Parent",
            "[Employees].[Sheri Nowmer].[Derrick Whelply]");
        // Ascendants(<Member>) applied to parent-child hierarchy accessed via
        // <Level>.Members
        testContext.assertAxisReturns(
            "Ascendants([Employees].Members.Item(73))",
            "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy].[Bertha Jameson].[James Bailey]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy].[Bertha Jameson]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
            + "[Employees].[Sheri Nowmer]\n"
            + "[Employees].[All Employees]");
    }
    /**
     * Tests {@code <Level>.Members} and {@code <Dimension>.Members};
     * calculated members are excluded (contrast with AllMembers).
     */
    public void testMembers() {
        // <Level>.members
        assertAxisReturns(
            "{[Customers].[Country].Members}",
            "[Customers].[Canada]\n"
            + "[Customers].[Mexico]\n"
            + "[Customers].[USA]");
        // <Level>.members applied to 'all' level
        assertAxisReturns(
            "{[Customers].[(All)].Members}", "[Customers].[All Customers]");
        // <Level>.members applied to measures dimension
        // Note -- no cube-level calculated members are present
        assertAxisReturns(
            "{[Measures].[MeasuresLevel].Members}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");
        // <Dimension>.members applied to Measures
        assertAxisReturns(
            "{[Measures].Members}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");
        // <Dimension>.members applied to a query with calc measures
        // Again, no calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].members} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n");
        }
        // <Level>.members applied to a query with calc measures
        // Again, no calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].[Measures].members} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n");
        }
    }
    /**
     * Tests {@code <Hierarchy>.Members} on the [Time.Weekly] hierarchy,
     * sampling the start (Head) and end (Tail) of the member list.
     */
    public void testHierarchyMembers() {
        assertAxisReturns(
            "Head({[Time.Weekly].Members}, 10)",
            "[Time].[Weekly].[All Weeklys]\n"
            + "[Time].[Weekly].[1997]\n"
            + "[Time].[Weekly].[1997].[1]\n"
            + "[Time].[Weekly].[1997].[1].[15]\n"
            + "[Time].[Weekly].[1997].[1].[16]\n"
            + "[Time].[Weekly].[1997].[1].[17]\n"
            + "[Time].[Weekly].[1997].[1].[18]\n"
            + "[Time].[Weekly].[1997].[1].[19]\n"
            + "[Time].[Weekly].[1997].[1].[20]\n"
            + "[Time].[Weekly].[1997].[2]");
        assertAxisReturns(
            "Tail({[Time.Weekly].Members}, 5)",
            "[Time].[Weekly].[1998].[51].[5]\n"
            + "[Time].[Weekly].[1998].[51].[29]\n"
            + "[Time].[Weekly].[1998].[51].[30]\n"
            + "[Time].[Weekly].[1998].[52]\n"
            + "[Time].[Weekly].[1998].[52].[6]");
    }
    /**
     * Tests {@code <Level>.AllMembers} and {@code <Dimension>.AllMembers};
     * unlike Members, these include calculated members defined in the
     * schema and (for AllMembers) in the query.
     */
    public void testAllMembers() {
        // <Level>.allmembers
        assertAxisReturns(
            "{[Customers].[Country].allmembers}",
            "[Customers].[Canada]\n"
            + "[Customers].[Mexico]\n"
            + "[Customers].[USA]");
        // <Level>.allmembers applied to 'all' level
        assertAxisReturns(
            "{[Customers].[(All)].allmembers}", "[Customers].[All Customers]");
        // <Level>.allmembers applied to measures dimension
        // Note -- cube-level calculated members ARE present
        assertAxisReturns(
            "{[Measures].[MeasuresLevel].allmembers}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]\n"
            + "[Measures].[Profit]\n"
            + "[Measures].[Profit Growth]\n"
            + "[Measures].[Profit last Period]");
        // <Dimension>.allmembers applied to Measures
        assertAxisReturns(
            "{[Measures].allmembers}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]\n"
            + "[Measures].[Profit]\n"
            + "[Measures].[Profit Growth]\n"
            + "[Measures].[Profit last Period]");
        // <Dimension>.allmembers applied to a query with calc measures
        // Calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].allmembers} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Xxx]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n"
                + "Row #0: $339,610.90\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $339,610.90\n"
                + "Row #0: 266,773\n");
        }
        // Calc measure members from schema and from query
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as\n"
                + " '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].AllMembers} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Unit to Sales ratio]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 46.7%\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 0.0%\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 48.0%\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 0.0%\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 47.6%\n");
        }
        // Calc member in query and schema not seen
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].AllMembers} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Unit to Sales ratio]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 46.7%\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 0.0%\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 48.0%\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 0.0%\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 47.6%\n");
        }
        // Calc member in query and schema not seen
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].Members} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n");
        }
        // Calc member in dimension based on level
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "non empty({[Store].[Store State].AllMembers}) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n");
        // Calc member in dimension based on level not seen
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "non empty({[Store].[Store Country].AllMembers}) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 66,291\n"
            + "Row #0: 139,628.35\n");
    }
    /**
     * Tests the AddCalculatedMembers function, which augments a set with
     * the calculated members defined (in the query or schema) on the same
     * hierarchy, and its single-dimension restriction.
     */
    public void testAddCalculatedMembers() {
        //----------------------------------------------------
        // AddCalculatedMembers: Calc member in dimension based on level
        // included
        //----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "AddCalculatedMembers([Store].[USA].Children) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n");
        //----------------------------------------------------
        // Calc member in dimension based on level included
        // Calc members in measures in schema included
        //----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT AddCalculatedMembers({[Measures].[Unit Sales], [Measures].[Store Sales]}) ON COLUMNS,"
            + "AddCalculatedMembers([Store].[USA].Children) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Profit]}\n"
            + "{[Measures].[Profit last Period]}\n"
            + "{[Measures].[Profit Growth]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #0: $21,744.11\n"
            + "Row #0: $21,744.11\n"
            + "Row #0: 0.0%\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #1: $24,089.22\n"
            + "Row #1: $24,089.22\n"
            + "Row #1: 0.0%\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #2: $38,042.78\n"
            + "Row #2: $38,042.78\n"
            + "Row #2: 0.0%\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n"
            + "Row #3: $45,833.33\n"
            + "Row #3: $45,833.33\n"
            + "Row #3: 0.0%\n");
        //----------------------------------------------------
        // Two dimensions
        //----------------------------------------------------
        assertQueryReturns(
            "SELECT AddCalculatedMembers({[Measures].[Unit Sales], [Measures].[Store Sales]}) ON COLUMNS,"
            + "{([Store].[USA].[CA], [Gender].[F])} ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Profit]}\n"
            + "{[Measures].[Profit last Period]}\n"
            + "{[Measures].[Profit Growth]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "Row #0: 8,218\n"
            + "Row #0: 17,928.37\n"
            + "Row #0: $10,771.98\n"
            + "Row #0: $10,771.98\n"
            + "Row #0: 0.0%\n");
        //----------------------------------------------------
        // Should throw more than one dimension error
        //----------------------------------------------------
        assertAxisThrows(
            "AddCalculatedMembers({([Store].[USA].[CA], [Gender].[F])})",
            "Only single dimension members allowed in set for "
            + "AddCalculatedMembers");
    }
    /**
     * Tests StripCalculatedMembers, the inverse of AddCalculatedMembers:
     * calculated members are removed, real members are left alone.
     */
    public void testStripCalculatedMembers() {
        assertAxisReturns(
            "StripCalculatedMembers({[Measures].AllMembers})",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");
        // applied to empty set
        assertAxisReturns("StripCalculatedMembers({[Gender].Parent})", "");
        getTestContext().assertSetExprDependsOn(
            "StripCalculatedMembers([Customers].CurrentMember.Children)",
            "{[Customers]}");
        // ----------------------------------------------------
        // Calc members in dimension based on level stripped
        // Actual members in measures left alone
        // ----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS "
            + "'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT StripCalculatedMembers({[Measures].[Unit Sales], "
            + "[Measures].[Store Sales]}) ON COLUMNS,"
            + "StripCalculatedMembers("
            + "AddCalculatedMembers([Store].[USA].Children)) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n");
    }
    /**
     * Tests the CurrentMember property on dimensions, hierarchies and
     * (as a Mondrian extension) levels.
     */
    public void testCurrentMember() {
        // <Dimension>.CurrentMember
        assertAxisReturns("[Gender].CurrentMember", "[Gender].[All Gender]");
        // <Hierarchy>.CurrentMember
        assertAxisReturns(
            "[Gender].Hierarchy.CurrentMember", "[Gender].[All Gender]");
        // <Level>.CurrentMember
        // MSAS doesn't allow this, but Mondrian does: it implicitly casts
        // level to hierarchy.
        assertAxisReturns("[Store Name].CurrentMember", "[Store].[All Stores]");
    }
    /**
     * Tests which hierarchies a CurrentMember expression depends on for
     * its evaluation.
     */
    public void testCurrentMemberDepends() {
        getTestContext().assertMemberExprDependsOn(
            "[Gender].CurrentMember",
            "{[Gender]}");
        getTestContext().assertExprDependsOn(
            "[Gender].[M].Dimension.Name", "{}");
        // implicit call to .CurrentMember when dimension is used as a member
        // expression
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension",
            "{[Gender]}");
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension.CurrentMember", "{[Gender]}");
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension.CurrentMember.Parent", "{[Gender]}");
        // [Customers] is short for [Customers].CurrentMember, so
        // depends upon everything
        getTestContext().assertExprDependsOn(
            "[Customers]", TestContext.allHiers());
    }
public void testCurrentMemberFromSlicer() {
Result result = executeQuery(
"with member [Measures].[Foo] as '[Gender].CurrentMember.Name'\n"
+ "select {[Measures].[Foo]} on columns\n"
+ "from Sales where ([Gender].[F])");
Assert.assertEquals("F", result.getCell(new int[]{0}).getValue());
}
public void testCurrentMemberFromDefaultMember() {
Result result = executeQuery(
"with member [Measures].[Foo] as"
+ " '[Time].[Time].CurrentMember.Name'\n"
+ "select {[Measures].[Foo]} on columns\n"
+ "from Sales");
Assert.assertEquals("1997", result.getCell(new int[]{0}).getValue());
}
    /**
     * Tests CurrentMember in a dimension with multiple hierarchies
     * ([Time] vs [Time].[Weekly]); the measure negates Unit Sales unless
     * the Weekly hierarchy is the evaluation context.
     */
    public void testCurrentMemberMultiHierarchy() {
        // Hierarchy naming depends on the SSAS-compatibility setting.
        final String hierarchyName =
            MondrianProperties.instance().SsasCompatibleNaming.get()
            ? "Weekly"
            : "Time.Weekly";
        final String queryString =
            "with member [Measures].[Foo] as\n"
            + " 'IIf(([Time].[Time].CurrentMember.Hierarchy.Name = \""
            + hierarchyName
            + "\"), \n"
            + "[Measures].[Unit Sales], \n"
            + "- [Measures].[Unit Sales])'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} ON COLUMNS,\n"
            + " {[Product].[Food].[Dairy]} ON ROWS\n"
            + "from [Sales]";
        Result result =
            executeQuery(
                queryString + " where [Time].[1997]");
        final int[] coords = {1, 0};
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());
        // As above, but context provided on rows axis as opposed to slicer.
        final String queryString1 =
            "with member [Measures].[Foo] as\n"
            + " 'IIf(([Time].[Time].CurrentMember.Hierarchy.Name = \""
            + hierarchyName
            + "\"), \n"
            + "[Measures].[Unit Sales], \n"
            + "- [Measures].[Unit Sales])'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} ON COLUMNS,";
        final String queryString2 =
            "from [Sales]\n"
            + " where [Product].[Food].[Dairy] ";
        result =
            executeQuery(
                queryString1 + " {[Time].[1997]} ON ROWS " + queryString2);
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());
        // Same pair of checks with a [Time.Weekly] member as the context.
        result =
            executeQuery(
                queryString + " where [Time.Weekly].[1997]");
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());
        result =
            executeQuery(
                queryString1 + " {[Time.Weekly].[1997]} ON ROWS "
                + queryString2);
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());
    }
    /**
     * Tests the DefaultMember property: for a hierarchy with no 'all'
     * member it is the first member of the first level; for one with an
     * 'all' member it is that member; and a schema may override it via
     * the defaultMember attribute.
     */
    public void testDefaultMember() {
        // [Time] has no default member and no all, so the default member is
        // the first member of the first level.
        Result result =
            executeQuery(
                "select {[Time].[Time].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            "1997",
            result.getAxes()[0].getPositions().get(0).get(0).getName());
        // [Time].[Weekly] has an all member and no explicit default.
        result =
            executeQuery(
                "select {[Time.Weekly].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            MondrianProperties.instance().SsasCompatibleNaming.get()
            ? "All Weeklys"
            : "All Time.Weeklys",
            result.getAxes()[0].getPositions().get(0).get(0).getName());
        final String memberUname =
            MondrianProperties.instance().SsasCompatibleNaming.get()
            ? "[Time2].[Weekly].[1997].[23]"
            : "[Time2.Weekly].[1997].[23]";
        // Substitute a [Time2] dimension whose Weekly hierarchy declares an
        // explicit defaultMember.
        TestContext testContext = TestContext.instance().createSubstitutingCube(
            "Sales",
            " <Dimension name=\"Time2\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
            + " <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
            + " <Table name=\"time_by_day\"/>\n"
            + " <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
            + " levelType=\"TimeYears\"/>\n"
            + " <Level name=\"Quarter\" column=\"quarter\" uniqueMembers=\"false\"\n"
            + " levelType=\"TimeQuarters\"/>\n"
            + " <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + " levelType=\"TimeMonths\"/>\n"
            + " </Hierarchy>\n"
            + " <Hierarchy hasAll=\"true\" name=\"Weekly\" primaryKey=\"time_id\"\n"
            + " defaultMember=\""
            + memberUname
            + "\">\n"
            + " <Table name=\"time_by_day\"/>\n"
            + " <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
            + " levelType=\"TimeYears\"/>\n"
            + " <Level name=\"Week\" column=\"week_of_year\" type=\"Numeric\" uniqueMembers=\"false\"\n"
            + " levelType=\"TimeWeeks\"/>\n"
            + " <Level name=\"Day\" column=\"day_of_month\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + " levelType=\"TimeDays\"/>\n"
            + " </Hierarchy>\n"
            + " </Dimension>");
        // In this variant of the schema, Time2.Weekly has an explicit default
        // member.
        result =
            testContext.executeQuery(
                "select {[Time2.Weekly].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            "23",
            result.getAxes()[0].getPositions().get(0).get(0).getName());
    }
public void testCurrentMemberFromAxis() {
    // [Foo] concatenates the names of the current Gender and Marital
    // Status members; the first cell of the crossjoined rows axis should
    // therefore read "F" + "M".
    final String mdx =
        "with member [Measures].[Foo] as"
        + " '[Gender].CurrentMember.Name"
        + " || [Marital Status].CurrentMember.Name'\n"
        + "select {[Measures].[Foo]} on columns,\n"
        + " CrossJoin({[Gender].children},"
        + " {[Marital Status].children}) on rows\n"
        + "from Sales";
    final Result axisResult = executeQuery(mdx);
    Assert.assertEquals(
        "FM", axisResult.getCell(new int[]{0, 0}).getValue());
}
/**
 * Verifies which member {@code CurrentMember} reports while a calculated
 * member is being evaluated. MSOLAP regards the calculated member itself
 * as the current member of its dimension (which would cycle here);
 * Mondrian instead reports the member that was current before the
 * calculated member was expanded.
 */
public void testCurrentMemberInCalcMember() {
    final String mdx =
        "with member [Measures].[Foo] as '[Measures].CurrentMember.Name'\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from Sales";
    final Result queryResult = executeQuery(mdx);
    // The previously-current measure, not [Foo] itself.
    Assert.assertEquals(
        "Unit Sales", queryResult.getCell(new int[]{0}).getValue());
}
/**
 * Tests NamedSet.CurrentOrdinal combined with the Order function.
 *
 * <p>Ordering a named set by its own CurrentOrdinal descending should
 * simply reverse the set.
 */
public void testNamedSetCurrentOrdinalWithOrder() {
    // The <Named Set>.CurrentOrdinal only works correctly when named sets
    // are evaluated as iterables, and JDK 1.4 only supports lists.
    if (Util.Retrowoven) {
        return;
    }
    assertQueryReturns(
        "with set [Time Regular] as [Time].[Time].Members\n"
        + " set [Time Reversed] as"
        + " Order([Time Regular], [Time Regular].CurrentOrdinal, BDESC)\n"
        + "select [Time Reversed] on 0\n"
        + "from [Sales]",
        // Expected: the full [Time] member list in reverse hierarchical
        // order (1998 first, 1997 last). 1998 cells are empty in FoodMart.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1998].[Q4].[12]}\n"
        + "{[Time].[1998].[Q4].[11]}\n"
        + "{[Time].[1998].[Q4].[10]}\n"
        + "{[Time].[1998].[Q4]}\n"
        + "{[Time].[1998].[Q3].[9]}\n"
        + "{[Time].[1998].[Q3].[8]}\n"
        + "{[Time].[1998].[Q3].[7]}\n"
        + "{[Time].[1998].[Q3]}\n"
        + "{[Time].[1998].[Q2].[6]}\n"
        + "{[Time].[1998].[Q2].[5]}\n"
        + "{[Time].[1998].[Q2].[4]}\n"
        + "{[Time].[1998].[Q2]}\n"
        + "{[Time].[1998].[Q1].[3]}\n"
        + "{[Time].[1998].[Q1].[2]}\n"
        + "{[Time].[1998].[Q1].[1]}\n"
        + "{[Time].[1998].[Q1]}\n"
        + "{[Time].[1998]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997]}\n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: 26,796\n"
        + "Row #0: 25,270\n"
        + "Row #0: 19,958\n"
        + "Row #0: 72,024\n"
        + "Row #0: 20,388\n"
        + "Row #0: 21,697\n"
        + "Row #0: 23,763\n"
        + "Row #0: 65,848\n"
        + "Row #0: 21,350\n"
        + "Row #0: 21,081\n"
        + "Row #0: 20,179\n"
        + "Row #0: 62,610\n"
        + "Row #0: 23,706\n"
        + "Row #0: 20,957\n"
        + "Row #0: 21,628\n"
        + "Row #0: 66,291\n"
        + "Row #0: 266,773\n");
}
/**
 * Tests NamedSet.CurrentOrdinal combined with the Generate function.
 *
 * <p>Generate over the full [Time] set, picking {@code Item(ordinal * 2)},
 * should yield every other member of the hierarchy.
 */
public void testNamedSetCurrentOrdinalWithGenerate() {
    // The <Named Set>.CurrentOrdinal only works correctly when named sets
    // are evaluated as iterables, and JDK 1.4 only supports lists.
    if (Util.Retrowoven) {
        return;
    }
    assertQueryReturns(
        " with set [Time Regular] as [Time].[Time].Members\n"
        + "set [Every Other Time] as\n"
        + " Generate(\n"
        + " [Time Regular],\n"
        + " {[Time].[Time].Members.Item(\n"
        + " [Time Regular].CurrentOrdinal * 2)})\n"
        + "select [Every Other Time] on 0\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "{[Time].[1998].[Q1]}\n"
        + "{[Time].[1998].[Q1].[2]}\n"
        + "{[Time].[1998].[Q2]}\n"
        + "{[Time].[1998].[Q2].[5]}\n"
        + "{[Time].[1998].[Q3]}\n"
        + "{[Time].[1998].[Q3].[8]}\n"
        + "{[Time].[1998].[Q4]}\n"
        + "{[Time].[1998].[Q4].[11]}\n"
        + "Row #0: 266,773\n"
        + "Row #0: 21,628\n"
        + "Row #0: 23,706\n"
        + "Row #0: 20,179\n"
        + "Row #0: 21,350\n"
        + "Row #0: 23,763\n"
        + "Row #0: 20,388\n"
        + "Row #0: 19,958\n"
        + "Row #0: 26,796\n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n");
}
/**
 * Tests NamedSet.CurrentOrdinal combined with the Filter function:
 * keeping only the members at ordinals 3 and 5 of the [Time] member list.
 */
public void testNamedSetCurrentOrdinalWithFilter() {
    // The <Named Set>.CurrentOrdinal only works correctly when named sets
    // are evaluated as iterables, and JDK 1.4 only supports lists.
    if (Util.Retrowoven) {
        return;
    }
    assertQueryReturns(
        "with set [Time Regular] as [Time].[Time].Members\n"
        + " set [Time Subset] as "
        + " Filter([Time Regular], [Time Regular].CurrentOrdinal = 3"
        + " or [Time Regular].CurrentOrdinal = 5)\n"
        + "select [Time Subset] on 0\n"
        + "from [Sales]",
        // Ordinal 3 is [1997].[Q1].[2], ordinal 5 is [1997].[Q2].
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "Row #0: 20,957\n"
        + "Row #0: 62,610\n");
}
public void testNamedSetCurrentOrdinalWithCrossjoin() {
    // TODO: exercise <Named Set>.CurrentOrdinal inside a Crossjoin.
    // Intentionally empty placeholder; no assertions yet.
}
/**
 * Verifies that CurrentOrdinal and Current are rejected with a
 * "Not a named set" error when applied to anything that is not a plain
 * named-set reference: a braced set, or a set-valued expression.
 */
public void testNamedSetCurrentOrdinalWithNonNamedSetFails() {
    // a named set wrapped in {...} is not a named set, so CurrentOrdinal
    // fails
    assertQueryThrows(
        "with set [Time Members] as [Time].Members\n"
        + "member [Measures].[Foo] as ' {[Time Members]}.CurrentOrdinal '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
        + " {[Product].Children} on 1\n"
        + "from [Sales]",
        "Not a named set");
    // as above for Current function
    assertQueryThrows(
        "with set [Time Members] as [Time].Members\n"
        + "member [Measures].[Foo] as ' {[Time Members]}.Current.Name '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
        + " {[Product].Children} on 1\n"
        + "from [Sales]",
        "Not a named set");
    // a set expression is not a named set, so CurrentOrdinal fails
    assertQueryThrows(
        "with member [Measures].[Foo] as\n"
        + " ' Head([Time].Members, 5).CurrentOrdinal '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
        + " {[Product].Children} on 1\n"
        + "from [Sales]",
        "Not a named set");
    // as above for Current function
    assertQueryThrows(
        "with member [Measures].[Foo] as\n"
        + " ' Crossjoin([Time].Members, [Gender].Members).Current.Name '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
        + " {[Product].Children} on 1\n"
        + "from [Sales]",
        "Not a named set");
}
public void testDimensionDefaultMember() {
    // [Measures] has no 'all' member; its default is the first measure.
    final Member defaultMember =
        executeSingletonAxis("[Measures].DefaultMember");
    Assert.assertEquals("Unit Sales", defaultMember.getName());
}
/**
 * Tests the DrilldownLevel function: members already drilled are not
 * drilled again; later forms use the index argument to pick which
 * hierarchy of a tuple set to drill.
 */
public void testDrilldownLevel() {
    // Expect all children of USA
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA]}, [Store].[Store Country])",
        "[Store].[USA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[WA]");
    // Expect same set, because [USA] is already drilled
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA], [Store].[USA].[CA]}, [Store].[Store Country])",
        "[Store].[USA]\n"
        + "[Store].[USA].[CA]");
    // Expect drill, because [USA] isn't already drilled. You can't
    // drill down on [CA] and get to [USA]
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]}, [Store].[Store Country])",
        "[Store].[USA].[CA]\n"
        + "[Store].[USA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[WA]");
    // No level argument; index 0 drills each member one level down.
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]},, 0)",
        "[Store].[USA].[CA]\n"
        + "[Store].[USA].[CA].[Alameda]\n"
        + "[Store].[USA].[CA].[Beverly Hills]\n"
        + "[Store].[USA].[CA].[Los Angeles]\n"
        + "[Store].[USA].[CA].[San Diego]\n"
        + "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[WA]");
    // Tuple set: index 0 drills the [Store] component of each tuple.
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]} * {[Gender].Members},, 0)",
        "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA].[Alameda], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA].[Los Angeles], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA].[San Diego], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA].[San Francisco], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA], [Gender].[F]}\n"
        + "{[Store].[USA].[CA].[Alameda], [Gender].[F]}\n"
        + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[F]}\n"
        + "{[Store].[USA].[CA].[Los Angeles], [Gender].[F]}\n"
        + "{[Store].[USA].[CA].[San Diego], [Gender].[F]}\n"
        + "{[Store].[USA].[CA].[San Francisco], [Gender].[F]}\n"
        + "{[Store].[USA].[CA], [Gender].[M]}\n"
        + "{[Store].[USA].[CA].[Alameda], [Gender].[M]}\n"
        + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[M]}\n"
        + "{[Store].[USA].[CA].[Los Angeles], [Gender].[M]}\n"
        + "{[Store].[USA].[CA].[San Diego], [Gender].[M]}\n"
        + "{[Store].[USA].[CA].[San Francisco], [Gender].[M]}\n"
        + "{[Store].[USA], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[OR], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[WA], [Gender].[All Gender]}\n"
        + "{[Store].[USA], [Gender].[F]}\n"
        + "{[Store].[USA].[CA], [Gender].[F]}\n"
        + "{[Store].[USA].[OR], [Gender].[F]}\n"
        + "{[Store].[USA].[WA], [Gender].[F]}\n"
        + "{[Store].[USA], [Gender].[M]}\n"
        + "{[Store].[USA].[CA], [Gender].[M]}\n"
        + "{[Store].[USA].[OR], [Gender].[M]}\n"
        + "{[Store].[USA].[WA], [Gender].[M]}");
    // Index 1 drills the [Gender] component of each tuple instead.
    assertAxisReturns(
        "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]} * {[Gender].Members},, 1)",
        "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
        + "{[Store].[USA].[CA], [Gender].[F]}\n"
        + "{[Store].[USA].[CA], [Gender].[M]}\n"
        + "{[Store].[USA].[CA], [Gender].[F]}\n"
        + "{[Store].[USA].[CA], [Gender].[M]}\n"
        + "{[Store].[USA], [Gender].[All Gender]}\n"
        + "{[Store].[USA], [Gender].[F]}\n"
        + "{[Store].[USA], [Gender].[M]}\n"
        + "{[Store].[USA], [Gender].[F]}\n"
        + "{[Store].[USA], [Gender].[M]}");
}
/**
 * Tests DrilldownLevelTop (and once DrilldownLevelBottom): drilling each
 * qualifying member and keeping only the top (or bottom) n children,
 * including degenerate n values (negative, zero, null).
 */
public void testDrilldownLevelTop() {
    // <set>, <n>, <level>
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, 2, [Store].[Store Country])",
        "[Store].[USA]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[CA]");
    // similarly DrilldownLevelBottom
    assertAxisReturns(
        "DrilldownLevelBottom({[Store].[USA]}, 2, [Store].[Store Country])",
        "[Store].[USA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[CA]");
    // <set>, <n>
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, 2)",
        "[Store].[USA]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[CA]");
    // <n> greater than number of children
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA], [Store].[Canada]}, 4)",
        "[Store].[USA]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[Canada]\n"
        + "[Store].[Canada].[BC]");
    // <n> negative
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, 2 - 3)",
        "[Store].[USA]");
    // <n> zero
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, 2 - 2)",
        "[Store].[USA]");
    // <n> null
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, null)",
        "[Store].[USA]");
    // mixed bag, no level, all expanded
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA], "
        + "[Store].[USA].[CA].[San Francisco], "
        + "[Store].[All Stores], "
        + "[Store].[Canada].[BC]}, "
        + "2)",
        "[Store].[USA]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA].[CA].[San Francisco].[Store 14]\n"
        + "[Store].[All Stores]\n"
        + "[Store].[USA]\n"
        + "[Store].[Canada]\n"
        + "[Store].[Canada].[BC]\n"
        + "[Store].[Canada].[BC].[Vancouver]\n"
        + "[Store].[Canada].[BC].[Victoria]");
    // mixed bag, only specified level expanded
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA], "
        + "[Store].[USA].[CA].[San Francisco], "
        + "[Store].[All Stores], "
        + "[Store].[Canada].[BC]}, 2, [Store].[Store City])",
        "[Store].[USA]\n"
        + "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA].[CA].[San Francisco].[Store 14]\n"
        + "[Store].[All Stores]\n"
        + "[Store].[Canada].[BC]");
    // bad level
    assertAxisThrows(
        "DrilldownLevelTop({[Store].[USA]}, 2, [Customers].[Country])",
        "Level '[Customers].[Country]' not compatible with "
        + "member '[Store].[USA]'");
}
/**
 * Tests DrilldownLevelTop with an empty level argument and an explicit
 * ordering expression.
 *
 * <p>NOTE(review): despite its name, this method exercises
 * DrilldownLevelTop, not DrilldownMember.
 */
public void testDrilldownMemberEmptyExpr() {
    // no level, with expression
    assertAxisReturns(
        "DrilldownLevelTop({[Store].[USA]}, 2, , [Measures].[Unit Sales])",
        "[Store].[USA]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[CA]");
    // reverse expression
    assertAxisReturns(
        "DrilldownLevelTop("
        + "{[Store].[USA]}, 2, , - [Measures].[Unit Sales])",
        "[Store].[USA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[CA]");
}
/**
 * Tests the DrilldownMember function: drilling only the members listed in
 * the second set, the RECURSIVE option, and tuple sets.
 */
public void testDrilldownMember() {
    // Expect all children of USA
    assertAxisReturns(
        "DrilldownMember({[Store].[USA]}, {[Store].[USA]})",
        "[Store].[USA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[WA]");
    // Expect all children of USA.CA and USA.OR
    assertAxisReturns(
        "DrilldownMember({[Store].[USA].[CA], [Store].[USA].[OR]}, "
        + "{[Store].[USA].[CA], [Store].[USA].[OR], [Store].[USA].[WA]})",
        "[Store].[USA].[CA]\n"
        + "[Store].[USA].[CA].[Alameda]\n"
        + "[Store].[USA].[CA].[Beverly Hills]\n"
        + "[Store].[USA].[CA].[Los Angeles]\n"
        + "[Store].[USA].[CA].[San Diego]\n"
        + "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[OR].[Portland]\n"
        + "[Store].[USA].[OR].[Salem]");
    // Second set is empty
    assertAxisReturns(
        "DrilldownMember({[Store].[USA]}, {})",
        "[Store].[USA]");
    // Drill down a leaf member
    assertAxisReturns(
        "DrilldownMember({[Store].[All Stores].[USA].[CA].[San Francisco].[Store 14]}, "
        + "{[Store].[USA].[CA].[San Francisco].[Store 14]})",
        "[Store].[USA].[CA].[San Francisco].[Store 14]");
    // Complex case with option recursive
    assertAxisReturns(
        "DrilldownMember({[Store].[All Stores].[USA]}, "
        + "{[Store].[All Stores].[USA], [Store].[All Stores].[USA].[CA], "
        + "[Store].[All Stores].[USA].[CA].[San Diego], [Store].[All Stores].[USA].[WA]}, "
        + "RECURSIVE)",
        "[Store].[USA]\n"
        + "[Store].[USA].[CA]\n"
        + "[Store].[USA].[CA].[Alameda]\n"
        + "[Store].[USA].[CA].[Beverly Hills]\n"
        + "[Store].[USA].[CA].[Los Angeles]\n"
        + "[Store].[USA].[CA].[San Diego]\n"
        + "[Store].[USA].[CA].[San Diego].[Store 24]\n"
        + "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA].[OR]\n"
        + "[Store].[USA].[WA]\n"
        + "[Store].[USA].[WA].[Bellingham]\n"
        + "[Store].[USA].[WA].[Bremerton]\n"
        + "[Store].[USA].[WA].[Seattle]\n"
        + "[Store].[USA].[WA].[Spokane]\n"
        + "[Store].[USA].[WA].[Tacoma]\n"
        + "[Store].[USA].[WA].[Walla Walla]\n"
        + "[Store].[USA].[WA].[Yakima]");
    // Sets of tuples
    assertAxisReturns(
        "DrilldownMember({([Store Type].[Supermarket], [Store].[USA])}, {[Store].[USA]})",
        "{[Store Type].[Supermarket], [Store].[USA]}\n"
        + "{[Store Type].[Supermarket], [Store].[USA].[CA]}\n"
        + "{[Store Type].[Supermarket], [Store].[USA].[OR]}\n"
        + "{[Store Type].[Supermarket], [Store].[USA].[WA]}");
}
public void testFirstChildFirstInLevel() {
    // First child of Q4 1997 is the first month of that quarter.
    final Member child = executeSingletonAxis("[Time].[1997].[Q4].FirstChild");
    Assert.assertEquals("10", child.getName());
}

public void testFirstChildAll() {
    // First child of the 'all' member is the first real gender.
    final Member child =
        executeSingletonAxis("[Gender].[All Gender].FirstChild");
    Assert.assertEquals("F", child.getName());
}

public void testFirstChildOfChildless() {
    // A leaf member has no children, so FirstChild yields null.
    final Member child =
        executeSingletonAxis("[Gender].[All Gender].[F].FirstChild");
    Assert.assertNull(child);
}
public void testFirstSiblingFirstInLevel() {
    // [F] is already first among its siblings.
    final Member sibling = executeSingletonAxis("[Gender].[F].FirstSibling");
    Assert.assertEquals("F", sibling.getName());
}

public void testFirstSiblingLastInLevel() {
    // From Q4, FirstSibling goes back to Q1.
    final Member sibling =
        executeSingletonAxis("[Time].[1997].[Q4].FirstSibling");
    Assert.assertEquals("Q1", sibling.getName());
}

public void testFirstSiblingAll() {
    // The 'all' member is its own first sibling.
    final Member sibling =
        executeSingletonAxis("[Gender].[All Gender].FirstSibling");
    Assert.assertTrue(sibling.isAll());
}

public void testFirstSiblingRoot() {
    // The [Measures] hierarchy does not have an 'all' member, so
    // [Unit Sales] does not have a parent; the first root member is the
    // first sibling of [Store Sales].
    final Member sibling =
        executeSingletonAxis("[Measures].[Store Sales].FirstSibling");
    Assert.assertEquals("Unit Sales", sibling.getName());
}

public void testFirstSiblingNull() {
    // FirstChild of a leaf is null, so FirstSibling of it is null too.
    final Member sibling =
        executeSingletonAxis("[Gender].[F].FirstChild.FirstSibling");
    Assert.assertNull(sibling);
}
public void testLag() {
    // December 1997 lagged by 4 lands on August.
    final Member lagged = executeSingletonAxis("[Time].[1997].[Q4].[12].Lag(4)");
    Assert.assertEquals("8", lagged.getName());
}

public void testLagFirstInLevel() {
    // Nothing precedes [F] in its level.
    final Member lagged = executeSingletonAxis("[Gender].[F].Lag(1)");
    Assert.assertNull(lagged);
}

public void testLagAll() {
    // The default member has nothing before it either.
    final Member lagged = executeSingletonAxis("[Gender].DefaultMember.Lag(2)");
    Assert.assertNull(lagged);
}

public void testLagRoot() {
    // 1998 lagged by 1 is 1997.
    final Member lagged = executeSingletonAxis("[Time].[1998].Lag(1)");
    Assert.assertEquals("1997", lagged.getName());
}

public void testLagRootTooFar() {
    // Lagging past the first year runs off the level: null.
    final Member lagged = executeSingletonAxis("[Time].[1998].Lag(2)");
    Assert.assertNull(lagged);
}
public void testLastChild() {
    // Last child of the Gender hierarchy root is [M].
    final Member child = executeSingletonAxis("[Gender].LastChild");
    Assert.assertEquals("M", child.getName());
}

public void testLastChildLastInLevel() {
    // Last month of Q4 is December.
    final Member child = executeSingletonAxis("[Time].[1997].[Q4].LastChild");
    Assert.assertEquals("12", child.getName());
}

public void testLastChildAll() {
    // Same answer when asked explicitly of the 'all' member.
    final Member child = executeSingletonAxis("[Gender].[All Gender].LastChild");
    Assert.assertEquals("M", child.getName());
}

public void testLastChildOfChildless() {
    // Leaf members have no children.
    final Member child = executeSingletonAxis("[Gender].[M].LastChild");
    Assert.assertNull(child);
}
public void testLastSibling() {
    // [M] is the last sibling of [F].
    final Member sibling = executeSingletonAxis("[Gender].[F].LastSibling");
    Assert.assertEquals("M", sibling.getName());
}

public void testLastSiblingFirstInLevel() {
    // From Q1, LastSibling jumps forward to Q4.
    final Member sibling = executeSingletonAxis("[Time].[1997].[Q1].LastSibling");
    Assert.assertEquals("Q4", sibling.getName());
}

public void testLastSiblingAll() {
    // The 'all' member is its own last sibling.
    final Member sibling =
        executeSingletonAxis("[Gender].[All Gender].LastSibling");
    Assert.assertTrue(sibling.isAll());
}

public void testLastSiblingRoot() {
    // The [Time] hierarchy does not have an 'all' member, so [1997] and
    // [1998] do not have parents; the last root member is 1998 itself.
    final Member sibling = executeSingletonAxis("[Time].[1998].LastSibling");
    Assert.assertEquals("1998", sibling.getName());
}

public void testLastSiblingNull() {
    // FirstChild of a leaf is null, so LastSibling of it is null too.
    final Member sibling =
        executeSingletonAxis("[Gender].[F].FirstChild.LastSibling");
    Assert.assertNull(sibling);
}
public void testLead() {
    // April 1997 led by 4 lands on August.
    final Member led = executeSingletonAxis("[Time].[1997].[Q2].[4].Lead(4)");
    Assert.assertEquals("8", led.getName());
}

public void testLeadNegative() {
    // A negative Lead moves backwards.
    final Member led = executeSingletonAxis("[Gender].[M].Lead(-1)");
    Assert.assertEquals("F", led.getName());
}

public void testLeadLastInLevel() {
    // Leading past the end of the level yields null.
    final Member led = executeSingletonAxis("[Gender].[M].Lead(3)");
    Assert.assertNull(led);
}

public void testLeadNull() {
    // Lead applied to a null member yields null.
    final Member led = executeSingletonAxis("[Gender].Parent.Lead(1)");
    Assert.assertNull(led);
}

public void testLeadZero() {
    // Lead(0) is the member itself.
    final Member led = executeSingletonAxis("[Gender].[F].Lead(0)");
    Assert.assertEquals("F", led.getName());
}
public void testBasic2() {
    // [F].NextMember is [M].
    final Result res = executeQuery(
        "select {[Gender].[F].NextMember} ON COLUMNS from Sales");
    assertEquals(
        "M",
        res.getAxes()[0].getPositions().get(0).get(0).getName());
}

public void testFirstInLevel2() {
    // [M] is the last member of its level, so NextMember is null and the
    // axis comes back empty.
    final Result res = executeQuery(
        "select {[Gender].[M].NextMember} ON COLUMNS from Sales");
    assertEquals(0, res.getAxes()[0].getPositions().size());
}

public void testAll2() {
    final Result res = executeQuery(
        "select {[Gender].PrevMember} ON COLUMNS from Sales");
    // previous to [Gender].[All] is null, so no members are returned
    assertEquals(0, res.getAxes()[0].getPositions().size());
}
// Parent of [Drink] is the 'all' member at the hierarchy root.
public void testBasic5() {
    Result result =
        executeQuery(
            "select{ [Product].[All Products].[Drink].Parent} on columns "
            + "from Sales");
    assertEquals(
        "All Products",
        result.getAxes()[0].getPositions().get(0).get(0).getName());
}

// Parent of a month is its quarter.
public void testFirstInLevel5() {
    Result result =
        executeQuery(
            "select {[Time].[1997].[Q2].[4].Parent} on columns,"
            + "{[Gender].[M]} on rows from Sales");
    assertEquals(
        "Q2",
        result.getAxes()[0].getPositions().get(0).get(0).getName());
}

// Parent of a quarter is its year.
public void testAll5() {
    Result result =
        executeQuery(
            "select {[Time].[1997].[Q2].Parent} on columns,"
            + "{[Gender].[M]} on rows from Sales");
    // Q2's parent is 1997. (The previous comment here, about PrevMember
    // of [Gender].[All], was copy-pasted from the PrevMember tests and
    // did not describe this query.)
    assertEquals(
        "1997",
        result.getAxes()[0].getPositions().get(0).get(0).getName());
}
public void testBasic() {
    // [M].PrevMember is [F].
    final Result res = executeQuery(
        "select {[Gender].[M].PrevMember} ON COLUMNS from Sales");
    assertEquals(
        "F",
        res.getAxes()[0].getPositions().get(0).get(0).getName());
}

public void testFirstInLevel() {
    // [F] is first in its level, so PrevMember is null and the axis is
    // empty.
    final Result res = executeQuery(
        "select {[Gender].[F].PrevMember} ON COLUMNS from Sales");
    assertEquals(0, res.getAxes()[0].getPositions().size());
}

public void testAll() {
    final Result res = executeQuery(
        "select {[Gender].PrevMember} ON COLUMNS from Sales");
    // previous to [Gender].[All] is null, so no members are returned
    assertEquals(0, res.getAxes()[0].getPositions().size());
}
/**
 * Tests which hierarchies the Aggregate function (and a plain tuple)
 * report as dependencies.
 */
public void testAggregateDepends() {
    // Depends on everything except Measures, Gender
    String s12 = TestContext.allHiersExcept("[Measures]", "[Gender]");
    getTestContext().assertExprDependsOn(
        "([Measures].[Unit Sales], [Gender].[F])", s12);
    // Aggregating over [Customers].Members removes the dependency on
    // Customers; the tuple pins Measures and Gender.
    // NOTE(review): s13 only excludes [Customers] and [Gender], although
    // the expression also fixes [Measures] — confirm whether [Measures]
    // belongs in allHiersExcept here.
    String s13 = TestContext.allHiersExcept("[Customers]", "[Gender]");
    getTestContext().assertExprDependsOn(
        "Aggregate([Customers].Members, ([Measures].[Unit Sales], [Gender].[F]))",
        s13);
    // Depends on everything except Customers
    String s11 = TestContext.allHiersExcept("[Customers]");
    getTestContext().assertExprDependsOn(
        "Aggregate([Customers].Members)",
        s11);
    // Depends on the current member of the Product dimension, even though
    // [Product].[All Products] is referenced from the expression.
    String s1 = TestContext.allHiersExcept("[Customers]");
    getTestContext().assertExprDependsOn(
        "Aggregate(Filter([Customers].[City].Members, (([Measures].[Unit Sales] / ([Measures].[Unit Sales], [Product].[All Products])) > 0.1)))",
        s1);
}
/**
 * Tests a calculated member defined with Aggregate: [CA plus OR] should
 * equal the sum of the CA and OR rows for both measures.
 */
public void testAggregate() {
    assertQueryReturns(
        "WITH MEMBER [Store].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})'\n"
        + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,\n"
        + " {[Store].[USA].[CA], [Store].[USA].[OR], [Store].[CA plus OR]} ON ROWS\n"
        + "FROM Sales\n"
        + "WHERE ([1997].[Q1])",
        "Axis #0:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #2:\n"
        + "{[Store].[USA].[CA]}\n"
        + "{[Store].[USA].[OR]}\n"
        + "{[Store].[CA plus OR]}\n"
        // Row #2 values are the sums of rows #0 and #1.
        + "Row #0: 16,890\n"
        + "Row #0: 36,175.20\n"
        + "Row #1: 19,287\n"
        + "Row #1: 40,170.29\n"
        + "Row #2: 36,177\n"
        + "Row #2: 76,345.49\n");
}
/**
 * Tests Aggregate used to build half-year totals on the [Time] hierarchy,
 * plus a difference between two aggregated calculated members.
 */
public void testAggregate2() {
    assertQueryReturns(
        "WITH\n"
        + " Member [Time].[Time].[1st Half Sales] AS 'Aggregate({Time.[1997].[Q1], Time.[1997].[Q2]})'\n"
        + " Member [Time].[Time].[2nd Half Sales] AS 'Aggregate({Time.[1997].[Q3], Time.[1997].[Q4]})'\n"
        + " Member [Time].[Time].[Difference] AS 'Time.[2nd Half Sales] - Time.[1st Half Sales]'\n"
        + "SELECT\n"
        + " { [Store].[Store State].Members} ON COLUMNS,\n"
        + " { Time.[1st Half Sales], Time.[2nd Half Sales], Time.[Difference]} ON ROWS\n"
        + "FROM Sales\n"
        + "WHERE [Measures].[Store Sales]",
        "Axis #0:\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #1:\n"
        + "{[Store].[Canada].[BC]}\n"
        + "{[Store].[Mexico].[DF]}\n"
        + "{[Store].[Mexico].[Guerrero]}\n"
        + "{[Store].[Mexico].[Jalisco]}\n"
        + "{[Store].[Mexico].[Veracruz]}\n"
        + "{[Store].[Mexico].[Yucatan]}\n"
        + "{[Store].[Mexico].[Zacatecas]}\n"
        + "{[Store].[USA].[CA]}\n"
        + "{[Store].[USA].[OR]}\n"
        + "{[Store].[USA].[WA]}\n"
        + "Axis #2:\n"
        + "{[Time].[1st Half Sales]}\n"
        + "{[Time].[2nd Half Sales]}\n"
        + "{[Time].[Difference]}\n"
        // Non-US states have no Sales fact rows, hence the empty cells.
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: 74,571.95\n"
        + "Row #0: 71,943.17\n"
        + "Row #0: 125,779.50\n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: 84,595.89\n"
        + "Row #1: 70,333.90\n"
        + "Row #1: 138,013.72\n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: 10,023.94\n"
        + "Row #2: -1,609.27\n"
        + "Row #2: 12,234.22\n");
}
/**
 * Tests Aggregate nested inside Iif. The condition 3&gt;1 is always true,
 * so the member evaluates to the aggregate over [USA].[OR].
 */
public void testAggregateWithIIF() {
    assertQueryReturns(
        "with member store.foo as 'iif(3>1,"
        + "aggregate({[Store].[All Stores].[USA].[OR]}),"
        + "aggregate({[Store].[All Stores].[USA].[CA]}))' "
        + "select {store.foo} on 0 from sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Store].[foo]}\n"
        + "Row #0: 67,659\n");
}
/**
 * Same query as {@code testAggregate2} but using
 * [Store State].AllMembers on the columns axis; the expected result is
 * identical.
 */
public void testAggregate2AllMembers() {
    assertQueryReturns(
        "WITH\n"
        + " Member [Time].[Time].[1st Half Sales] AS 'Aggregate({Time.[1997].[Q1], Time.[1997].[Q2]})'\n"
        + " Member [Time].[Time].[2nd Half Sales] AS 'Aggregate({Time.[1997].[Q3], Time.[1997].[Q4]})'\n"
        + " Member [Time].[Time].[Difference] AS 'Time.[2nd Half Sales] - Time.[1st Half Sales]'\n"
        + "SELECT\n"
        + " { [Store].[Store State].AllMembers} ON COLUMNS,\n"
        + " { Time.[1st Half Sales], Time.[2nd Half Sales], Time.[Difference]} ON ROWS\n"
        + "FROM Sales\n"
        + "WHERE [Measures].[Store Sales]",
        "Axis #0:\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #1:\n"
        + "{[Store].[Canada].[BC]}\n"
        + "{[Store].[Mexico].[DF]}\n"
        + "{[Store].[Mexico].[Guerrero]}\n"
        + "{[Store].[Mexico].[Jalisco]}\n"
        + "{[Store].[Mexico].[Veracruz]}\n"
        + "{[Store].[Mexico].[Yucatan]}\n"
        + "{[Store].[Mexico].[Zacatecas]}\n"
        + "{[Store].[USA].[CA]}\n"
        + "{[Store].[USA].[OR]}\n"
        + "{[Store].[USA].[WA]}\n"
        + "Axis #2:\n"
        + "{[Time].[1st Half Sales]}\n"
        + "{[Time].[2nd Half Sales]}\n"
        + "{[Time].[Difference]}\n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: \n"
        + "Row #0: 74,571.95\n"
        + "Row #0: 71,943.17\n"
        + "Row #0: 125,779.50\n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: \n"
        + "Row #1: 84,595.89\n"
        + "Row #1: 70,333.90\n"
        + "Row #1: 138,013.72\n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: \n"
        + "Row #2: 10,023.94\n"
        + "Row #2: -1,609.27\n"
        + "Row #2: 12,234.22\n");
}
/**
 * Tests using Aggregate-defined calculated members in the WHERE clause to
 * simulate a compound (multi-member) slicer.
 */
public void testAggregateToSimulateCompoundSlicer() {
    assertQueryReturns(
        "WITH MEMBER [Time].[Time].[1997 H1] as 'Aggregate({[Time].[1997].[Q1], [Time].[1997].[Q2]})'\n"
        + " MEMBER [Education Level].[College or higher] as 'Aggregate({[Education Level].[Bachelors Degree], [Education Level].[Graduate Degree]})'\n"
        + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} on columns,\n"
        + " {[Product].children} on rows\n"
        + "FROM [Sales]\n"
        + "WHERE ([Time].[1997 H1], [Education Level].[College or higher], [Gender].[F])",
        "Axis #0:\n"
        + "{[Time].[1997 H1], [Education Level].[College or higher], [Gender].[F]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink]}\n"
        + "{[Product].[Food]}\n"
        + "{[Product].[Non-Consumable]}\n"
        + "Row #0: 1,797\n"
        + "Row #0: 3,620.49\n"
        + "Row #1: 15,002\n"
        + "Row #1: 31,931.88\n"
        + "Row #2: 3,845\n"
        + "Row #2: 8,173.22\n");
}
/**
 * Tests behavior where CurrentMember occurs in calculated members and
 * that member is a set.
 *
 * <p>Mosha discusses this behavior in the article
 * <a href="http://www.mosha.com/msolap/articles/mdxmultiselectcalcs.htm">
 * Multiselect friendly MDX calculations</a>.
 *
 * <p>Mondrian's behavior is consistent with MSAS 2K: it returns zeroes.
 * SSAS 2005 returns an error, which can be fixed by reformulating the
 * calculated members.
 *
 * @see mondrian.rolap.FastBatchingCellReaderTest#testAggregateDistinctCount()
 */
public void testMultiselectCalculations() {
    assertQueryReturns(
        "WITH\n"
        + "MEMBER [Measures].[Declining Stores Count] AS\n"
        + " ' Count(Filter(Descendants(Store.CurrentMember, Store.[Store Name]), [Store Sales] < ([Store Sales],Time.Time.PrevMember))) '\n"
        + " MEMBER \n"
        + " [Store].[XL_QZX] AS 'Aggregate ({ [Store].[All Stores].[USA].[WA] , [Store].[All Stores].[USA].[CA] })' \n"
        + "SELECT \n"
        + " NON EMPTY HIERARCHIZE(AddCalculatedMembers({DrillDownLevel({[Product].[All Products]})})) \n"
        + " DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON COLUMNS \n"
        + "FROM [Sales] \n"
        + "WHERE ([Measures].[Declining Stores Count], [Time].[1998].[Q3], [Store].[XL_QZX])",
        // Expect zeroes (MSAS 2K behavior), not an error.
        "Axis #0:\n"
        + "{[Measures].[Declining Stores Count], [Time].[1998].[Q3], [Store].[XL_QZX]}\n"
        + "Axis #1:\n"
        + "{[Product].[All Products]}\n"
        + "{[Product].[Drink]}\n"
        + "{[Product].[Food]}\n"
        + "{[Product].[Non-Consumable]}\n"
        + "Row #0: .00\n"
        + "Row #0: .00\n"
        + "Row #0: .00\n"
        + "Row #0: .00\n");
}
public void testAvg() {
    // Average Store Sales across the three US states.
    final String expr =
        "AVG({[Store].[All Stores].[USA].children},[Measures].[Store Sales])";
    assertExprReturns(expr, "188,412.71");
}
// todo: testAvgWithNulls
public void testCorrelation() {
    // Unit Sales and Store Sales are almost perfectly correlated across
    // the US states; scale by 1e6 so the formatted value is stable.
    final String expr =
        "Correlation({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales]) * 1000000";
    assertExprReturns(expr, "999,906");
}
/**
 * Tests the Count function: dependency analysis for INCLUDEEMPTY vs
 * EXCLUDEEMPTY, a plain member count, and Count over an empty set.
 */
public void testCount() {
    // With INCLUDEEMPTY the count does not depend on the cell values, so
    // only [Gender] (referenced in the set expression) remains.
    getTestContext().assertExprDependsOn(
        "count(Crossjoin([Store].[All Stores].[USA].Children, {[Gender].children}), INCLUDEEMPTY)",
        "{[Gender]}");
    // With EXCLUDEEMPTY, emptiness depends on the evaluation context, so
    // the count depends on every hierarchy except [Store].
    String s1 = TestContext.allHiersExcept("[Store]");
    getTestContext().assertExprDependsOn(
        "count(Crossjoin([Store].[All Stores].[USA].Children, "
        + "{[Gender].children}), EXCLUDEEMPTY)",
        s1);
    assertExprReturns(
        "count({[Promotion Media].[Media Type].members})", "14");
    // applied to an empty set
    assertExprReturns("count({[Gender].Parent}, IncludeEmpty)", "0");
}
/**
 * Tests Count with EXCLUDEEMPTY: dependency analysis, a per-row count of
 * non-empty promotion media, and Count over an empty set.
 */
public void testCountExcludeEmpty() {
    String s1 = TestContext.allHiersExcept("[Store]");
    getTestContext().assertExprDependsOn(
        "count(Crossjoin([Store].[USA].Children, {[Gender].children}), EXCLUDEEMPTY)",
        s1);
    assertQueryReturns(
        "with member [Measures].[Promo Count] as \n"
        + " ' Count(Crossjoin({[Measures].[Unit Sales]},\n"
        + " {[Promotion Media].[Media Type].members}), EXCLUDEEMPTY)'\n"
        + "select {[Measures].[Unit Sales], [Measures].[Promo Count]} on columns,\n"
        + " {[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].children} on rows\n"
        + "from Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Promo Count]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Excellent]}\n"
        + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Fabulous]}\n"
        + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Skinner]}\n"
        + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Token]}\n"
        + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Washington]}\n"
        + "Row #0: 738\n"
        + "Row #0: 14\n"
        + "Row #1: 632\n"
        + "Row #1: 13\n"
        + "Row #2: 655\n"
        + "Row #2: 14\n"
        + "Row #3: 735\n"
        + "Row #3: 14\n"
        + "Row #4: 647\n"
        + "Row #4: 12\n");
    // applied to an empty set
    assertExprReturns("count({[Gender].Parent}, ExcludeEmpty)", "0");
}
/**
* Tests that the 'null' value is regarded as empty, even if the underlying
* cell has fact table rows.
*
* <p>For a fuller test case, see
* {@link mondrian.xmla.XmlaCognosTest#testCognosMDXSuiteConvertedAdventureWorksToFoodMart_015()}
*/
public void testCountExcludeEmptyNull() {
    // [Foo] is null for every quarter except Q2, so CountExc sees only one
    // non-empty child; [Bar] is 0 (not null) everywhere, so all four
    // quarters count as non-empty. CountInc always counts all four.
    assertQueryReturns(
        "WITH MEMBER [Measures].[Foo] AS\n"
        + " Iif("
        + TestContext.hierarchyName("Time", "Time")
        + ".CurrentMember.Name = 'Q2', 1, NULL)\n"
        + " MEMBER [Measures].[Bar] AS\n"
        + " Iif("
        + TestContext.hierarchyName("Time", "Time")
        + ".CurrentMember.Name = 'Q2', 1, 0)\n"
        + " Member [Time].[Time].[CountExc] AS\n"
        + " Count([Time].[1997].Children, EXCLUDEEMPTY),\n"
        + " SOLVE_ORDER = 2\n"
        + " Member [Time].[Time].[CountInc] AS\n"
        + " Count([Time].[1997].Children, INCLUDEEMPTY),\n"
        + " SOLVE_ORDER = 2\n"
        + "SELECT {[Measures].[Foo],\n"
        + " [Measures].[Bar],\n"
        + " [Measures].[Unit Sales]} ON 0,\n"
        + " {[Time].[1997].Children,\n"
        + " [Time].[CountExc],\n"
        + " [Time].[CountInc]} ON 1\n"
        + "FROM [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "{[Measures].[Bar]}\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "{[Time].[CountExc]}\n"
        + "{[Time].[CountInc]}\n"
        + "Row #0: \n"
        + "Row #0: 0\n"
        + "Row #0: 66,291\n"
        + "Row #1: 1\n"
        + "Row #1: 1\n"
        + "Row #1: 62,610\n"
        + "Row #2: \n"
        + "Row #2: 0\n"
        + "Row #2: 65,848\n"
        + "Row #3: \n"
        + "Row #3: 0\n"
        + "Row #3: 72,024\n"
        + "Row #4: 1\n"
        + "Row #4: 4\n"
        + "Row #4: 4\n"
        + "Row #5: 4\n"
        + "Row #5: 4\n"
        + "Row #5: 4\n");
}
/**
* Testcase for
* <a href="http://jira.pentaho.com/browse/MONDRIAN-710">
* bug MONDRIAN-710, "Count with ExcludeEmpty throws an exception when the
* cube does not have a factCountMeasure"</a>.
*/
public void testCountExcludeEmptyOnCubeWithNoCountFacts() {
    // Before the MONDRIAN-710 fix this threw because [Warehouse] has no
    // fact-count measure; now it must evaluate cleanly.
    assertQueryReturns(
        "WITH "
        + " MEMBER [Measures].[count] AS '"
        + " COUNT([Store Type].[Store Type].MEMBERS, EXCLUDEEMPTY)'"
        + " SELECT "
        + " {[Measures].[count]} ON AXIS(0)"
        + " FROM [Warehouse]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[count]}\n"
        + "Row #0: 5\n");
}
/**
 * As {@link #testCountExcludeEmptyOnCubeWithNoCountFacts()}, but against a
 * virtual cube ([Warehouse and Sales]) that likewise has no fact-count
 * measure (see bug MONDRIAN-710).
 */
public void testCountExcludeEmptyOnVirtualCubeWithNoCountFacts() {
    assertQueryReturns(
        "WITH "
        + " MEMBER [Measures].[count] AS '"
        + " COUNT([Store].MEMBERS, EXCLUDEEMPTY)'"
        + " SELECT "
        + " {[Measures].[count]} ON AXIS(0)"
        + " FROM [Warehouse and Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[count]}\n"
        + "Row #0: 31\n");
}
// todo: testCountNull, testCountNoExp
/** Tests the Covariance() aggregate (population covariance) over a set. */
public void testCovariance() {
    assertExprReturns(
        "Covariance({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales])",
        "1,355,761,899");
}
/** Tests the CovarianceN() aggregate (sample covariance, n-1 divisor). */
public void testCovarianceN() {
    assertExprReturns(
        "CovarianceN({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales])",
        "2,033,642,849");
}
/** Tests IIf() with numeric branches, including a numeric member compare. */
public void testIIfNumeric() {
    assertExprReturns(
        "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, 45, 32)",
        "45");
    // Compare two members. The system needs to figure out that they are
    // both numeric, and use the right overloaded version of ">", otherwise
    // we'll get a ClassCastException at runtime.
    assertExprReturns(
        "IIf([Measures].[Unit Sales] > [Measures].[Store Sales], 45, 32)",
        "32");
}
/** Tests the Max() aggregate over a set of members. */
public void testMax() {
    assertExprReturns(
        "MAX({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "263,793.22");
}
/**
 * Tests Min()/Max() over a set containing only negative values; regression
 * for bug 1771928, "Max() works incorrectly with negative values".
 */
public void testMaxNegative() {
    // Bug 1771928, "Max() works incorrectly with negative values"
    assertQueryReturns(
        "with \n"
        + " member [Customers].[Neg] as '-1'\n"
        + " member [Customers].[Min] as 'Min({[Customers].[Neg]})'\n"
        + " member [Customers].[Max] as 'Max({[Customers].[Neg]})'\n"
        + "select {[Customers].[Neg],[Customers].[Min],[Customers].[Max]} on 0\n"
        + "from Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Customers].[Neg]}\n"
        + "{[Customers].[Min]}\n"
        + "{[Customers].[Max]}\n"
        + "Row #0: -1\n"
        + "Row #0: -1\n"
        + "Row #0: -1\n");
}
/** Tests the Median() aggregate over a set of members. */
public void testMedian() {
    assertExprReturns(
        "MEDIAN({[Store].[All Stores].[USA].children},"
        + "[Measures].[Store Sales])",
        "159,167.84");
}
/**
 * Tests Median() evaluated per-column across all [Time] members, alongside
 * calculated half-year sums, for each store on the columns axis.
 */
public void testMedian2() {
    assertQueryReturns(
        "WITH\n"
        + " Member [Time].[Time].[1st Half Sales] AS 'Sum({[Time].[1997].[Q1], [Time].[1997].[Q2]})'\n"
        + " Member [Time].[Time].[2nd Half Sales] AS 'Sum({[Time].[1997].[Q3], [Time].[1997].[Q4]})'\n"
        + " Member [Time].[Time].[Median] AS 'Median(Time.[Time].Members)'\n"
        + "SELECT\n"
        + " NON EMPTY { [Store].[Store Name].Members} ON COLUMNS,\n"
        + " { [Time].[1st Half Sales], [Time].[2nd Half Sales], [Time].[Median]} ON ROWS\n"
        + "FROM Sales\n"
        + "WHERE [Measures].[Store Sales]",
        "Axis #0:\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #1:\n"
        + "{[Store].[USA].[CA].[Beverly Hills].[Store 6]}\n"
        + "{[Store].[USA].[CA].[Los Angeles].[Store 7]}\n"
        + "{[Store].[USA].[CA].[San Diego].[Store 24]}\n"
        + "{[Store].[USA].[CA].[San Francisco].[Store 14]}\n"
        + "{[Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Store].[USA].[OR].[Salem].[Store 13]}\n"
        + "{[Store].[USA].[WA].[Bellingham].[Store 2]}\n"
        + "{[Store].[USA].[WA].[Bremerton].[Store 3]}\n"
        + "{[Store].[USA].[WA].[Seattle].[Store 15]}\n"
        + "{[Store].[USA].[WA].[Spokane].[Store 16]}\n"
        + "{[Store].[USA].[WA].[Tacoma].[Store 17]}\n"
        + "{[Store].[USA].[WA].[Walla Walla].[Store 22]}\n"
        + "{[Store].[USA].[WA].[Yakima].[Store 23]}\n"
        + "Axis #2:\n"
        + "{[Time].[1st Half Sales]}\n"
        + "{[Time].[2nd Half Sales]}\n"
        + "{[Time].[Median]}\n"
        + "Row #0: 20,801.04\n"
        + "Row #0: 25,421.41\n"
        + "Row #0: 26,275.11\n"
        + "Row #0: 2,074.39\n"
        + "Row #0: 28,519.18\n"
        + "Row #0: 43,423.99\n"
        + "Row #0: 2,140.99\n"
        + "Row #0: 25,502.08\n"
        + "Row #0: 25,293.50\n"
        + "Row #0: 23,265.53\n"
        + "Row #0: 34,926.91\n"
        + "Row #0: 2,159.60\n"
        + "Row #0: 12,490.89\n"
        + "Row #1: 24,949.20\n"
        + "Row #1: 29,123.87\n"
        + "Row #1: 28,156.03\n"
        + "Row #1: 2,366.79\n"
        + "Row #1: 26,539.61\n"
        + "Row #1: 43,794.29\n"
        + "Row #1: 2,598.24\n"
        + "Row #1: 27,394.22\n"
        + "Row #1: 27,350.57\n"
        + "Row #1: 26,368.93\n"
        + "Row #1: 39,917.05\n"
        + "Row #1: 2,546.37\n"
        + "Row #1: 11,838.34\n"
        + "Row #2: 4,577.35\n"
        + "Row #2: 5,211.38\n"
        + "Row #2: 4,722.87\n"
        + "Row #2: 398.24\n"
        + "Row #2: 5,039.50\n"
        + "Row #2: 7,374.59\n"
        + "Row #2: 410.22\n"
        + "Row #2: 4,924.04\n"
        + "Row #2: 4,569.13\n"
        + "Row #2: 4,511.68\n"
        + "Row #2: 6,630.91\n"
        + "Row #2: 419.51\n"
        + "Row #2: 2,169.48\n");
}
/**
 * Tests Percentile(): the 0th/50th/100th percentiles coincide with
 * Min/Median/Max, plus interpolated cases in between.
 */
public void testPercentile() {
    // same result as median
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 50)",
        "159,167.84");
    // same result as min
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 0)",
        "142,277.07");
    // same result as max
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 100)",
        "263,793.22");
    // check some real percentile cases
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 50)",
        "49,634.46");
    // lets return the second element of the 7 children 4,739.23
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 100/7*2)",
        "4,739.23");
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 95)",
        "67,162.28");
}
/**
* Testcase for bug
* <a href="http://jira.pentaho.com/browse/MONDRIAN-1045">MONDRIAN-1045,
* "When I use the Percentile function it cracks when there's only
* 1 register"</a>.
*/
public void testPercentileBugMondrian1045() {
    // Single-element set: every percentile should return that element's
    // value rather than crashing (MONDRIAN-1045).
    // NOTE(review): expected values differ per percentile here, which
    // suggests an interpolation against an implicit bound — confirm
    // against the current Percentile implementation.
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 50)",
        "282,619.07");
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 40)",
        "226,095.25");
    assertExprReturns(
        "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 95)",
        "536,976.22");
}
/** Tests the Min() aggregate over a set of members. */
public void testMin() {
    assertExprReturns(
        "MIN({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "142,277.07");
}
/** Tests Min() whose value expression is a tuple rather than a measure. */
public void testMinTuple() {
    assertExprReturns(
        "Min([Customers].[All Customers].[USA].Children, ([Measures].[Unit Sales], [Gender].[All Gender].[F]))",
        "33,036");
}
/** Tests the Stdev() aggregate (sample standard deviation). */
public void testStdev() {
    assertExprReturns(
        "STDEV({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "65,825.45");
}
/** Tests the StdevP() aggregate (population standard deviation). */
public void testStdevP() {
    assertExprReturns(
        "STDEVP({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "53,746.26");
}
/** Tests Sum() without an explicit value expression (uses current measure). */
public void testSumNoExp() {
    assertExprReturns(
        "SUM({[Promotion Media].[Media Type].members})", "266,773");
}
/**
 * Tests the &lt;Member&gt;.VALUE property, plus NAME and CAPTION, and
 * verifies that unsupported member properties (FORMATTED_VALUE, ID, KEY)
 * raise the documented errors.
 */
public void testValue() {
    // VALUE is usually a cell property, not a member property.
    // We allow it because MS documents it as a function, <Member>.VALUE.
    assertExprReturns("[Measures].[Store Sales].VALUE", "565,238.13");
    // Depends upon almost everything.
    String s1 = TestContext.allHiersExcept("[Measures]");
    getTestContext().assertExprDependsOn(
        "[Measures].[Store Sales].VALUE", s1);
    // We do not allow FORMATTED_VALUE.
    assertExprThrows(
        "[Measures].[Store Sales].FORMATTED_VALUE",
        "MDX object '[Measures].[Store Sales].FORMATTED_VALUE' not found in cube 'Sales'");
    assertExprReturns("[Measures].[Store Sales].NAME", "Store Sales");
    // MS says that ID and KEY are standard member properties for
    // OLE DB for OLAP, but not for XML/A. We don't support them.
    assertExprThrows(
        "[Measures].[Store Sales].ID",
        "MDX object '[Measures].[Store Sales].ID' not found in cube 'Sales'");
    // Error for KEY is slightly different than for ID. It doesn't matter
    // very much.
    //
    // The error is different because KEY is registered as a Mondrian
    // builtin property, but ID isn't. KEY cannot be evaluated in
    // "<MEMBER>.KEY" syntax because there is not function defined. For
    // other builtin properties, such as NAME, CAPTION there is a builtin
    // function.
    assertExprThrows(
        "[Measures].[Store Sales].KEY",
        "No function matches signature '<Member>.KEY'");
    assertExprReturns("[Measures].[Store Sales].CAPTION", "Store Sales");
}
/** Tests the Var() aggregate (sample variance). */
public void testVar() {
    assertExprReturns(
        "VAR({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "4,332,990,493.69");
}
/** Tests the VarP() aggregate (population variance). */
public void testVarP() {
    assertExprReturns(
        "VARP({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
        "2,888,660,329.13");
}
/**
* Tests the AS operator, that gives an expression an alias.
*/
public void testAs() {
    // Basic alias: the alias is visible inside the enclosing Filter via
    // <alias>.Current.
    assertAxisReturns(
        "Filter([Customers].Children as t,\n"
        + "t.Current.Name = 'USA')",
        "[Customers].[USA]");
    // 'AS' and the ':' operator have similar precedence, so it's worth
    // checking that they play nice.
    assertQueryReturns(
        "select\n"
        + " filter(\n"
        + " [Time].[1997].[Q1].[2] : [Time].[1997].[Q3].[9] as t,"
        + " mod(t.CurrentOrdinal, 2) = 0) on 0\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "Row #0: 20,957\n"
        + "Row #0: 20,179\n"
        + "Row #0: 21,350\n"
        + "Row #0: 21,697\n");
    // AS member fails on SSAS with "The CHILDREN function expects a member
    // expression for the 0 argument. A tuple set expression was used."
    assertQueryThrows(
        "select\n"
        + " {([Time].[1997].[Q1] as t).Children, \n"
        + " t.Parent } on 0 \n"
        + "from [Sales]",
        "No function matches signature '<Set>.Children'");
    // Set of members. OK.
    assertQueryReturns(
        "select Measures.[Unit Sales] on 0, \n"
        + " {[Time].[1997].Children as t, \n"
        + " Descendants(t, [Time].[Month])} on 1 \n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Row #0: 66,291\n"
        + "Row #1: 62,610\n"
        + "Row #2: 65,848\n"
        + "Row #3: 72,024\n"
        + "Row #4: 21,628\n"
        + "Row #5: 20,957\n"
        + "Row #6: 23,706\n"
        + "Row #7: 20,179\n"
        + "Row #8: 21,081\n"
        + "Row #9: 21,350\n"
        + "Row #10: 23,763\n"
        + "Row #11: 21,697\n"
        + "Row #12: 20,388\n"
        + "Row #13: 19,958\n"
        + "Row #14: 25,270\n"
        + "Row #15: 26,796\n");
    // Alias a member. Implicitly becomes set. OK.
    assertQueryReturns(
        "select Measures.[Unit Sales] on 0,\n"
        + " {[Time].[1997] as t,\n"
        + " Descendants(t, [Time].[Month])} on 1\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Row #0: 266,773\n"
        + "Row #1: 21,628\n"
        + "Row #2: 20,957\n"
        + "Row #3: 23,706\n"
        + "Row #4: 20,179\n"
        + "Row #5: 21,081\n"
        + "Row #6: 21,350\n"
        + "Row #7: 23,763\n"
        + "Row #8: 21,697\n"
        + "Row #9: 20,388\n"
        + "Row #10: 19,958\n"
        + "Row #11: 25,270\n"
        + "Row #12: 26,796\n");
    // Alias a tuple. Implicitly becomes set. The error confirms that the
    // named set's type is a set of tuples. SSAS gives error "Descendants
    // function expects a member or set ..."
    assertQueryThrows(
        "select Measures.[Unit Sales] on 0,\n"
        + " {([Time].[1997], [Customers].[USA].[CA]) as t,\n"
        + " Descendants(t, [Time].[Month])} on 1\n"
        + "from [Sales]",
        "Argument to Descendants function must be a member or set of members, not a set of tuples");
}
/**
 * Further tests of the AS (inline alias) operator: name shadowing between
 * named sets and aliases, scoping across axes, Current/CurrentOrdinal on
 * aliased sets, redefining the same alias within an axis, and the errors
 * for invalid alias syntax.
 */
public void testAs2() {
    // Named set and alias with same name (t) and a second alias (t2).
    // Reference to t from within descendants resolves to alias, of type
    // [Time], because it is nearer.
    final String result =
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales], [Gender].[F]}\n"
        + "{[Measures].[Unit Sales], [Gender].[M]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "Row #0: 32,910\n"
        + "Row #0: 33,381\n"
        + "Row #1: 30,992\n"
        + "Row #1: 31,618\n"
        + "Row #2: 32,599\n"
        + "Row #2: 33,249\n"
        + "Row #3: 35,057\n"
        + "Row #3: 36,967\n"
        + "Row #4: 10,932\n"
        + "Row #4: 10,696\n"
        + "Row #5: 10,466\n"
        + "Row #5: 10,884\n"
        + "Row #6: 12,320\n"
        + "Row #6: 12,950\n";
    assertQueryReturns(
        "with set t as [Gender].Children\n"
        + "select\n"
        + " Measures.[Unit Sales] * t on 0,\n"
        + " {\n"
        + " [Time].[1997].Children as t,\n"
        + " Filter(\n"
        + " Descendants(t, [Time].[Month]) as t2,\n"
        + " Mod(t2.CurrentOrdinal, 5) = 0)\n"
        + " } on 1\n"
        + "from [Sales]",
        result);
    // Two aliases with same name. OK.
    assertQueryReturns(
        "select\n"
        + " Measures.[Unit Sales] * [Gender].Children as t on 0,\n"
        + " {[Time].[1997].Children as t,\n"
        + " Filter(\n"
        + " Descendants(t, [Time].[Month]) as t2,\n"
        + " Mod(t2.CurrentOrdinal, 5) = 0)\n"
        + " } on 1\n"
        + "from [Sales]",
        result);
    // Bug MONDRIAN-648 causes 'AS' to have lower precedence than '*'.
    if (Bug.BugMondrian648Fixed) {
        // Note that 'as' has higher precedence than '*'.
        // Placeholder expected result; fill in when MONDRIAN-648 is fixed.
        assertQueryReturns(
            "select\n"
            + " Measures.[Unit Sales] * [Gender].Members as t on 0,\n"
            + " {t} on 1\n"
            + "from [Sales]",
            "xxxxx");
    }
    // Reference to hierarchy on other axis.
    // On SSAS 2005, finds t, and gives error,
    // "The Gender hierarchy already appears in the Axis0 axis."
    // On Mondrian, cannot find t. FIXME.
    assertQueryThrows(
        "select\n"
        + " Measures.[Unit Sales] * ([Gender].Members as t) on 0,\n"
        + " {t} on 1\n"
        + "from [Sales]",
        "MDX object 't' not found in cube 'Sales'");
    // As above, with parentheses. Tuple valued.
    // On SSAS 2005, finds t, and gives error,
    // "The Measures hierarchy already appears in the Axis0 axis."
    // On Mondrian, cannot find t. FIXME.
    assertQueryThrows(
        "select\n"
        + " (Measures.[Unit Sales] * [Gender].Members) as t on 0,\n"
        + " {t} on 1\n"
        + "from [Sales]",
        "MDX object 't' not found in cube 'Sales'");
    // Calculated set, CurrentMember
    assertQueryReturns(
        "select Measures.[Unit Sales] on 0,\n"
        + " filter(\n"
        + " (Time.Month.Members * Gender.Members) as s,\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S], [Gender].[F]) > 17000) on 1\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
        + "Row #0: 19,958\n"
        + "Row #1: 9,506\n"
        + "Row #2: 10,452\n"
        + "Row #3: 25,270\n"
        + "Row #4: 12,320\n"
        + "Row #5: 12,950\n"
        + "Row #6: 26,796\n"
        + "Row #7: 13,231\n"
        + "Row #8: 13,565\n");
    // As above, but don't override [Gender] in filter condition. Note that
    // the filter condition is evaluated in the context created by the
    // filter set. So, only items with [All Gender] pass the filter.
    assertQueryReturns(
        "select Measures.[Unit Sales] on 0,\n"
        + " filter(\n"
        + " (Time.Month.Members * Gender.Members) as s,\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 35000) on 1\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
        + "Row #0: 19,958\n"
        + "Row #1: 25,270\n"
        + "Row #2: 26,796\n");
    // Multiple definitions of alias within same axis
    assertQueryReturns(
        "select Measures.[Unit Sales] on 0,\n"
        + " generate(\n"
        + " [Marital Status].Children as s,\n"
        + " filter(\n"
        + " (Time.Month.Members * Gender.Members) as s,\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S], [Gender].[F]) > 17000),\n"
        + " ALL) on 1\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
        + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
        + "Row #0: 19,958\n"
        + "Row #1: 9,506\n"
        + "Row #2: 10,452\n"
        + "Row #3: 25,270\n"
        + "Row #4: 12,320\n"
        + "Row #5: 12,950\n"
        + "Row #6: 26,796\n"
        + "Row #7: 13,231\n"
        + "Row #8: 13,565\n"
        + "Row #9: 19,958\n"
        + "Row #10: 9,506\n"
        + "Row #11: 10,452\n"
        + "Row #12: 25,270\n"
        + "Row #13: 12,320\n"
        + "Row #14: 12,950\n"
        + "Row #15: 26,796\n"
        + "Row #16: 13,231\n"
        + "Row #17: 13,565\n");
    // Multiple definitions of alias within same axis.
    //
    // On SSAS 2005, gives error, "The CURRENT function cannot be called in
    // current context because the 'x' set is not in scope". SSAS 2005 gives
    // same error even if set does not exist.
    assertQueryThrows(
        "with member Measures.Foo as 'x.Current.Name'\n"
        + "select\n"
        + " {Measures.[Unit Sales], Measures.Foo} on 0,\n"
        + " generate(\n"
        + " [Marital Status].\n"
        + " Children as x,\n"
        + " filter(\n"
        + " Gender.Members as x,\n"
        + " (x.Current, [Marital Status].[S]) > 50000),\n"
        + " ALL) on 1\n"
        + "from [Sales]",
        "MDX object 'x' not found in cube 'Sales'");
    // As above, but set is not out of scope; it does not exist; but error
    // should be the same.
    assertQueryThrows(
        "with member Measures.Foo as 'z.Current.Name'\n"
        + "select\n"
        + " {Measures.[Unit Sales], Measures.Foo} on 0,\n"
        + " generate(\n"
        + " [Marital Status].\n"
        + " Children as s,\n"
        + " filter(\n"
        + " Gender.Members as s,\n"
        + " (s.Current, [Marital Status].[S]) > 50000),\n"
        + " ALL) on 1\n"
        + "from [Sales]",
        "MDX object 'z' not found in cube 'Sales'");
    // 'set AS string' is invalid
    assertQueryThrows(
        "select Measures.[Unit Sales] on 0,\n"
        + " filter(\n"
        + " (Time.Month.Members * Gender.Members) as 'foo',\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
        + "from [Sales]",
        "Syntax error at line 3, column 46, token ''foo''");
    // 'set AS numeric' is invalid
    assertQueryThrows(
        "select Measures.[Unit Sales] on 0,\n"
        + " filter(\n"
        + " (Time.Month.Members * Gender.Members) as 1234,\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
        + "from [Sales]",
        "Syntax error at line 3, column 46, token '1234'");
    // 'numeric AS identifier' is invalid
    assertQueryThrows(
        "select Measures.[Unit Sales] on 0,\n"
        + " filter(\n"
        + " 123 * 456 as s,\n"
        + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
        + "from [Sales]",
        "No function matches signature '<Numeric Expression> AS <Set>'");
}
/** Tests Ascendants(): returns the member and all its ancestors, bottom-up. */
public void testAscendants() {
    assertAxisReturns(
        "Ascendants([Store].[USA].[CA])",
        "[Store].[USA].[CA]\n"
        + "[Store].[USA]\n"
        + "[Store].[All Stores]");
}
/** Ascendants() of the all member is just the all member itself. */
public void testAscendantsAll() {
    assertAxisReturns(
        "Ascendants([Store].DefaultMember)", "[Store].[All Stores]");
}
/** Ascendants() of the null member ([F].PrevMember) is the empty set. */
public void testAscendantsNull() {
    assertAxisReturns(
        "Ascendants([Gender].[F].PrevMember)", "");
}
/** Tests BottomCount(): the N members with the lowest measure values. */
public void testBottomCount() {
    assertAxisReturns(
        "BottomCount({[Promotion Media].[Media Type].members}, 2, [Measures].[Unit Sales])",
        "[Promotion Media].[Radio]\n"
        + "[Promotion Media].[Sunday Paper, Radio, TV]");
}
// todo: test unordered
/**
 * Tests BottomPercent(): at 100% the whole (filtered) set is returned in
 * ascending measure order; at 1% only the smallest contributors survive.
 */
public void testBottomPercent() {
    assertAxisReturns(
        "BottomPercent(Filter({[Store].[All Stores].[USA].[CA].Children, [Store].[All Stores].[USA].[OR].Children, [Store].[All Stores].[USA].[WA].Children}, ([Measures].[Unit Sales] > 0.0)), 100.0, [Measures].[Store Sales])",
        "[Store].[USA].[CA].[San Francisco]\n"
        + "[Store].[USA].[WA].[Walla Walla]\n"
        + "[Store].[USA].[WA].[Bellingham]\n"
        + "[Store].[USA].[WA].[Yakima]\n"
        + "[Store].[USA].[CA].[Beverly Hills]\n"
        + "[Store].[USA].[WA].[Spokane]\n"
        + "[Store].[USA].[WA].[Seattle]\n"
        + "[Store].[USA].[WA].[Bremerton]\n"
        + "[Store].[USA].[CA].[San Diego]\n"
        + "[Store].[USA].[CA].[Los Angeles]\n"
        + "[Store].[USA].[OR].[Portland]\n"
        + "[Store].[USA].[WA].[Tacoma]\n"
        + "[Store].[USA].[OR].[Salem]");
    assertAxisReturns(
        "BottomPercent({[Promotion Media].[Media Type].members}, 1, [Measures].[Unit Sales])",
        "[Promotion Media].[Radio]\n"
        + "[Promotion Media].[Sunday Paper, Radio, TV]");
}
// todo: test precision
/** Tests BottomSum(): smallest members whose running total stays under the threshold. */
public void testBottomSum() {
    assertAxisReturns(
        "BottomSum({[Promotion Media].[Media Type].members}, 5000, [Measures].[Unit Sales])",
        "[Promotion Media].[Radio]\n"
        + "[Promotion Media].[Sunday Paper, Radio, TV]");
}
/** Tests Except() with an empty operand on either side. */
public void testExceptEmpty() {
    // If left is empty, result is empty.
    assertAxisReturns(
        "Except(Filter([Gender].Members, 1=0), {[Gender].[M]})", "");
    // If right is empty, result is left.
    assertAxisReturns(
        "Except({[Gender].[M]}, Filter([Gender].Members, 1=0))",
        "[Gender].[M]");
}
/**
* Tests that Except() successfully removes crossjoined tuples
* from the axis results. Previously, this would fail by returning
* all tuples in the first argument to Except. bug 1439627
*/
public void testExceptCrossjoin() {
    // Removing a crossjoined tuple must leave only the remaining tuples;
    // the pre-fix behavior returned the entire first argument (bug 1439627).
    assertAxisReturns(
        "Except(CROSSJOIN({[Promotion Media].[All Media]},\n"
        + " [Product].[All Products].Children),\n"
        + " CROSSJOIN({[Promotion Media].[All Media]},\n"
        + " {[Product].[All Products].[Drink]}))",
        "{[Promotion Media].[All Media], [Product].[Food]}\n"
        + "{[Promotion Media].[All Media], [Product].[Non-Consumable]}");
}
/**
 * Tests Extract(): projecting one or more hierarchies out of a set of
 * tuples, including error cases (no hierarchy argument, non-constant
 * hierarchy, hierarchy absent from the set, duplicate hierarchy) and
 * edge cases (duplicate removal, empty input, asymmetric tuple sets).
 */
public void testExtract() {
    assertAxisReturns(
        "Extract(\n"
        + "Crossjoin({[Gender].[F], [Gender].[M]},\n"
        + " {[Marital Status].Members}),\n"
        + "[Gender])",
        "[Gender].[F]\n" + "[Gender].[M]");
    // Extract(<set>) with no dimensions is not valid
    assertAxisThrows(
        "Extract(Crossjoin({[Gender].[F], [Gender].[M]}, {[Marital Status].Members}))",
        "No function matches signature 'Extract(<Set>)'");
    // Extract applied to non-constant dimension should fail
    assertAxisThrows(
        "Extract(Crossjoin([Gender].Members, [Store].Children), [Store].Hierarchy.Dimension)",
        "not a constant hierarchy: [Store].Hierarchy.Dimension");
    // Extract applied to non-constant hierarchy should fail
    assertAxisThrows(
        "Extract(Crossjoin([Gender].Members, [Store].Children), [Store].Hierarchy)",
        "not a constant hierarchy: [Store].Hierarchy");
    // Extract applied to set of members is OK (if silly). Duplicates are
    // removed, as always.
    assertAxisReturns(
        "Extract({[Gender].[M], [Gender].Members}, [Gender])",
        "[Gender].[M]\n"
        + "[Gender].[All Gender]\n"
        + "[Gender].[F]");
    // Extract of hierarchy not in set fails
    assertAxisThrows(
        "Extract(Crossjoin([Gender].Members, [Store].Children), [Marital Status])",
        "hierarchy [Marital Status] is not a hierarchy of the expression Crossjoin([Gender].Members, [Store].Children)");
    // Extract applied to empty set returns empty set
    assertAxisReturns(
        "Extract(Crossjoin({[Gender].Parent}, [Store].Children), [Store])",
        "");
    // Extract applied to asymmetric set
    assertAxisReturns(
        "Extract(\n"
        + "{([Gender].[M], [Marital Status].[M]),\n"
        + " ([Gender].[F], [Marital Status].[M]),\n"
        + " ([Gender].[M], [Marital Status].[S])},\n"
        + "[Gender])",
        "[Gender].[M]\n" + "[Gender].[F]");
    // Extract applied to asymmetric set (other side)
    assertAxisReturns(
        "Extract(\n"
        + "{([Gender].[M], [Marital Status].[M]),\n"
        + " ([Gender].[F], [Marital Status].[M]),\n"
        + " ([Gender].[M], [Marital Status].[S])},\n"
        + "[Marital Status])",
        "[Marital Status].[M]\n"
        + "[Marital Status].[S]");
    // Extract more than one hierarchy
    assertAxisReturns(
        "Extract(\n"
        + "[Gender].Children * [Marital Status].Children * [Time].[1997].Children * [Store].[USA].Children,\n"
        + "[Time], [Marital Status])",
        "{[Time].[1997].[Q1], [Marital Status].[M]}\n"
        + "{[Time].[1997].[Q2], [Marital Status].[M]}\n"
        + "{[Time].[1997].[Q3], [Marital Status].[M]}\n"
        + "{[Time].[1997].[Q4], [Marital Status].[M]}\n"
        + "{[Time].[1997].[Q1], [Marital Status].[S]}\n"
        + "{[Time].[1997].[Q2], [Marital Status].[S]}\n"
        + "{[Time].[1997].[Q3], [Marital Status].[S]}\n"
        + "{[Time].[1997].[Q4], [Marital Status].[S]}");
    // Extract duplicate hierarchies fails
    assertAxisThrows(
        "Extract(\n"
        + "{([Gender].[M], [Marital Status].[M]),\n"
        + " ([Gender].[F], [Marital Status].[M]),\n"
        + " ([Gender].[M], [Marital Status].[S])},\n"
        + "[Gender], [Gender])",
        "hierarchy [Gender] is extracted more than once");
}
/**
* Tests that TopPercent() operates succesfully on a
* axis of crossjoined tuples. previously, this would
* fail with a ClassCastException in FunUtil.java. bug 1440306
*/
public void testTopPercentCrossjoin() {
    // Pre-fix this raised a ClassCastException in FunUtil when the sorted
    // set contained tuples rather than members (bug 1440306).
    assertAxisReturns(
        "{TopPercent(Crossjoin([Product].[Product Department].members,\n"
        + "[Time].[1997].children),10,[Measures].[Store Sales])}",
        "{[Product].[Food].[Produce], [Time].[1997].[Q4]}\n"
        + "{[Product].[Food].[Produce], [Time].[1997].[Q1]}\n"
        + "{[Product].[Food].[Produce], [Time].[1997].[Q3]}");
}
/**
 * Tests a nested CrossJoin: the result is flattened into 3-way tuples in
 * right-fastest order.
 */
public void testCrossjoinNested() {
    assertAxisReturns(
        " CrossJoin(\n"
        + " CrossJoin(\n"
        + " [Gender].members,\n"
        + " [Marital Status].members),\n"
        + " {[Store], [Store].children})",
        "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
        + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
        + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
        + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[USA]}\n"
        + "{[Gender].[All Gender], [Marital Status].[M], [Store].[All Stores]}\n"
        + "{[Gender].[All Gender], [Marital Status].[M], [Store].[Canada]}\n"
        + "{[Gender].[All Gender], [Marital Status].[M], [Store].[Mexico]}\n"
        + "{[Gender].[All Gender], [Marital Status].[M], [Store].[USA]}\n"
        + "{[Gender].[All Gender], [Marital Status].[S], [Store].[All Stores]}\n"
        + "{[Gender].[All Gender], [Marital Status].[S], [Store].[Canada]}\n"
        + "{[Gender].[All Gender], [Marital Status].[S], [Store].[Mexico]}\n"
        + "{[Gender].[All Gender], [Marital Status].[S], [Store].[USA]}\n"
        + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
        + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
        + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
        + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[USA]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Store].[All Stores]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Store].[Canada]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Store].[Mexico]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Store].[USA]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Store].[All Stores]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Store].[Canada]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Store].[Mexico]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Store].[USA]}\n"
        + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
        + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
        + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
        + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[USA]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Store].[All Stores]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Store].[Canada]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Store].[Mexico]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Store].[USA]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[All Stores]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[Canada]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[Mexico]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[USA]}");
}
/** CrossJoin of two singleton tuple sets yields one combined tuple. */
public void testCrossjoinSingletonTuples() {
    assertAxisReturns(
        "CrossJoin({([Gender].[M])}, {([Marital Status].[S])})",
        "{[Gender].[M], [Marital Status].[S]}");
}
/** Nested CrossJoin of singleton tuple sets flattens into 3-way tuples. */
public void testCrossjoinSingletonTuplesNested() {
    assertAxisReturns(
        "CrossJoin({([Gender].[M])}, CrossJoin({([Marital Status].[S])}, [Store].children))",
        "{[Gender].[M], [Marital Status].[S], [Store].[Canada]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[Mexico]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Store].[USA]}");
}
/** The '*' operator between sets is shorthand for CrossJoin. */
public void testCrossjoinAsterisk() {
    assertAxisReturns(
        "{[Gender].[M]} * {[Marital Status].[S]}",
        "{[Gender].[M], [Marital Status].[S]}");
}
/**
 * Tests '*' (crossjoin) where the middle operand is a tuple; the tuple's
 * hierarchies are flattened into the result tuple.
 */
public void testCrossjoinAsteriskTuple() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} ON COLUMNS, "
        + "NON EMPTY [Store].[All Stores] "
        + " * ([Product].[All Products], [Gender]) "
        + " * [Customers].[All Customers] ON ROWS "
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Store].[All Stores], [Product].[All Products], [Gender].[All Gender], [Customers].[All Customers]}\n"
        + "Row #0: 266,773\n");
}
/**
 * Tests that chained '*' (crossjoin) associates correctly when the result
 * is passed through Order().
 */
public void testCrossjoinAsteriskAssoc() {
    assertAxisReturns(
        "Order({[Gender].Children} * {[Marital Status].Children} * {[Time].[1997].[Q2].Children},"
        + "[Measures].[Unit Sales])",
        "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[4]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[6]}\n"
        + "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[5]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[5]}\n"
        + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[6]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[4]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[5]}\n"
        + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[6]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[6]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[5]}");
}
/**
 * The '*' operator may appear inside a set constructor '{...}'; the
 * members are crossed with the children of [Time].[1997].[Q2].
 */
public void testCrossjoinAsteriskInsideBraces() {
    final String axisExpr =
        "{[Gender].[M] * [Marital Status].[S] * [Time].[1997].[Q2].Children}";
    final String expected =
        "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[5]}\n"
        + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[6]}";
    assertAxisReturns(axisExpr, expected);
}
/**
 * Query-level test of '*' (crossjoin) on both axes of the HR cube,
 * combined with DIMENSION PROPERTIES: measures crossed with 1997
 * quarters on columns, USA stores crossed with positions on rows.
 * The expected cell block pins salary, count and headcount values per
 * store/position row (blank rows where no data exists).
 */
public void testCrossJoinAsteriskQuery() {
assertQueryReturns(
"SELECT {[Measures].members * [1997].children} ON COLUMNS,\n"
+ " {[Store].[USA].children * [Position].[All Position].children} DIMENSION PROPERTIES [Store].[Store SQFT] ON ROWS\n"
+ "FROM [HR]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Org Salary], [Time].[1997].[Q1]}\n"
+ "{[Measures].[Org Salary], [Time].[1997].[Q2]}\n"
+ "{[Measures].[Org Salary], [Time].[1997].[Q3]}\n"
+ "{[Measures].[Org Salary], [Time].[1997].[Q4]}\n"
+ "{[Measures].[Count], [Time].[1997].[Q1]}\n"
+ "{[Measures].[Count], [Time].[1997].[Q2]}\n"
+ "{[Measures].[Count], [Time].[1997].[Q3]}\n"
+ "{[Measures].[Count], [Time].[1997].[Q4]}\n"
+ "{[Measures].[Number of Employees], [Time].[1997].[Q1]}\n"
+ "{[Measures].[Number of Employees], [Time].[1997].[Q2]}\n"
+ "{[Measures].[Number of Employees], [Time].[1997].[Q3]}\n"
+ "{[Measures].[Number of Employees], [Time].[1997].[Q4]}\n"
+ "Axis #2:\n"
+ "{[Store].[USA].[CA], [Position].[Middle Management]}\n"
+ "{[Store].[USA].[CA], [Position].[Senior Management]}\n"
+ "{[Store].[USA].[CA], [Position].[Store Full Time Staf]}\n"
+ "{[Store].[USA].[CA], [Position].[Store Management]}\n"
+ "{[Store].[USA].[CA], [Position].[Store Temp Staff]}\n"
+ "{[Store].[USA].[OR], [Position].[Middle Management]}\n"
+ "{[Store].[USA].[OR], [Position].[Senior Management]}\n"
+ "{[Store].[USA].[OR], [Position].[Store Full Time Staf]}\n"
+ "{[Store].[USA].[OR], [Position].[Store Management]}\n"
+ "{[Store].[USA].[OR], [Position].[Store Temp Staff]}\n"
+ "{[Store].[USA].[WA], [Position].[Middle Management]}\n"
+ "{[Store].[USA].[WA], [Position].[Senior Management]}\n"
+ "{[Store].[USA].[WA], [Position].[Store Full Time Staf]}\n"
+ "{[Store].[USA].[WA], [Position].[Store Management]}\n"
+ "{[Store].[USA].[WA], [Position].[Store Temp Staff]}\n"
+ "Row #0: $275.40\n"
+ "Row #0: $275.40\n"
+ "Row #0: $275.40\n"
+ "Row #0: $275.40\n"
+ "Row #0: 27\n"
+ "Row #0: 27\n"
+ "Row #0: 27\n"
+ "Row #0: 27\n"
+ "Row #0: 9\n"
+ "Row #0: 9\n"
+ "Row #0: 9\n"
+ "Row #0: 9\n"
+ "Row #1: $837.00\n"
+ "Row #1: $837.00\n"
+ "Row #1: $837.00\n"
+ "Row #1: $837.00\n"
+ "Row #1: 24\n"
+ "Row #1: 24\n"
+ "Row #1: 24\n"
+ "Row #1: 24\n"
+ "Row #1: 8\n"
+ "Row #1: 8\n"
+ "Row #1: 8\n"
+ "Row #1: 8\n"
+ "Row #2: $1,728.45\n"
+ "Row #2: $1,727.02\n"
+ "Row #2: $1,727.72\n"
+ "Row #2: $1,726.55\n"
+ "Row #2: 357\n"
+ "Row #2: 357\n"
+ "Row #2: 357\n"
+ "Row #2: 357\n"
+ "Row #2: 119\n"
+ "Row #2: 119\n"
+ "Row #2: 119\n"
+ "Row #2: 119\n"
+ "Row #3: $473.04\n"
+ "Row #3: $473.04\n"
+ "Row #3: $473.04\n"
+ "Row #3: $473.04\n"
+ "Row #3: 51\n"
+ "Row #3: 51\n"
+ "Row #3: 51\n"
+ "Row #3: 51\n"
+ "Row #3: 17\n"
+ "Row #3: 17\n"
+ "Row #3: 17\n"
+ "Row #3: 17\n"
+ "Row #4: $401.35\n"
+ "Row #4: $405.73\n"
+ "Row #4: $400.61\n"
+ "Row #4: $402.31\n"
+ "Row #4: 120\n"
+ "Row #4: 120\n"
+ "Row #4: 120\n"
+ "Row #4: 120\n"
+ "Row #4: 40\n"
+ "Row #4: 40\n"
+ "Row #4: 40\n"
+ "Row #4: 40\n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #5: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #6: \n"
+ "Row #7: $1,343.62\n"
+ "Row #7: $1,342.61\n"
+ "Row #7: $1,342.57\n"
+ "Row #7: $1,343.65\n"
+ "Row #7: 279\n"
+ "Row #7: 279\n"
+ "Row #7: 279\n"
+ "Row #7: 279\n"
+ "Row #7: 93\n"
+ "Row #7: 93\n"
+ "Row #7: 93\n"
+ "Row #7: 93\n"
+ "Row #8: $286.74\n"
+ "Row #8: $286.74\n"
+ "Row #8: $286.74\n"
+ "Row #8: $286.74\n"
+ "Row #8: 30\n"
+ "Row #8: 30\n"
+ "Row #8: 30\n"
+ "Row #8: 30\n"
+ "Row #8: 10\n"
+ "Row #8: 10\n"
+ "Row #8: 10\n"
+ "Row #8: 10\n"
+ "Row #9: $333.20\n"
+ "Row #9: $332.65\n"
+ "Row #9: $331.28\n"
+ "Row #9: $332.43\n"
+ "Row #9: 99\n"
+ "Row #9: 99\n"
+ "Row #9: 99\n"
+ "Row #9: 99\n"
+ "Row #9: 33\n"
+ "Row #9: 33\n"
+ "Row #9: 33\n"
+ "Row #9: 33\n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #10: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #11: \n"
+ "Row #12: $2,768.60\n"
+ "Row #12: $2,769.18\n"
+ "Row #12: $2,766.78\n"
+ "Row #12: $2,769.50\n"
+ "Row #12: 579\n"
+ "Row #12: 579\n"
+ "Row #12: 579\n"
+ "Row #12: 579\n"
+ "Row #12: 193\n"
+ "Row #12: 193\n"
+ "Row #12: 193\n"
+ "Row #12: 193\n"
+ "Row #13: $736.29\n"
+ "Row #13: $736.29\n"
+ "Row #13: $736.29\n"
+ "Row #13: $736.29\n"
+ "Row #13: 81\n"
+ "Row #13: 81\n"
+ "Row #13: 81\n"
+ "Row #13: 81\n"
+ "Row #13: 27\n"
+ "Row #13: 27\n"
+ "Row #13: 27\n"
+ "Row #13: 27\n"
+ "Row #14: $674.70\n"
+ "Row #14: $674.54\n"
+ "Row #14: $676.26\n"
+ "Row #14: $676.48\n"
+ "Row #14: 201\n"
+ "Row #14: 201\n"
+ "Row #14: 201\n"
+ "Row #14: 201\n"
+ "Row #14: 67\n"
+ "Row #14: 67\n"
+ "Row #14: 67\n"
+ "Row #14: 67\n");
}
/**
 * Testcase for bug 1889745, "StackOverflowError while resolving
 * crossjoin". The problem occurs when a calculated member that references
 * itself is referenced in a crossjoin.
 */
public void testCrossjoinResolve() {
assertQueryReturns(
// The calculated member [Filtered Unit Sales] recursively sums itself
// over [Product].CurrentMember.Children; the query must resolve without
// a StackOverflowError and return one row of gender-split totals.
"with\n"
+ "member [Measures].[Filtered Unit Sales] as\n"
+ " 'IIf((([Measures].[Unit Sales] > 50000.0)\n"
+ " OR ([Product].CurrentMember.Level.UniqueName <>\n"
+ " \"[Product].[Product Family]\")),\n"
+ " IIf(((Count([Product].CurrentMember.Children) = 0.0)),\n"
+ " [Measures].[Unit Sales],\n"
+ " Sum([Product].CurrentMember.Children,\n"
+ " [Measures].[Filtered Unit Sales])),\n"
+ " NULL)'\n"
+ "select NON EMPTY {crossjoin({[Measures].[Filtered Unit Sales]},\n"
+ "{[Gender].[M], [Gender].[F]})} ON COLUMNS,\n"
+ "NON EMPTY {[Product].[All Products]} ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997]",
"Axis #0:\n"
+ "{[Time].[1997]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Filtered Unit Sales], [Gender].[M]}\n"
+ "{[Measures].[Filtered Unit Sales], [Gender].[F]}\n"
+ "Axis #2:\n"
+ "{[Product].[All Products]}\n"
+ "Row #0: 97,126\n"
+ "Row #0: 94,814\n");
}
/**
 * Test case for bug 1911832, "Exception converting immutable list to array
 * in JDK 1.5".
 */
public void testCrossjoinOrder() {
assertQueryReturns(
// A named set is Order-ed (BDESC by Unit Sales) and then crossjoined;
// the conversion of the ordered (immutable) list must not throw.
"WITH\n"
+ "\n"
+ "SET [S1] AS 'CROSSJOIN({[Time].[1997]}, {[Gender].[Gender].MEMBERS})'\n"
+ "\n"
+ "SELECT CROSSJOIN(ORDER([S1], [Measures].[Unit Sales], BDESC),\n"
+ "{[Measures].[Unit Sales]}) ON AXIS(0)\n"
+ "FROM [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Time].[1997], [Gender].[M], [Measures].[Unit Sales]}\n"
+ "{[Time].[1997], [Gender].[F], [Measures].[Unit Sales]}\n"
+ "Row #0: 135,215\n"
+ "Row #0: 131,558\n");
}
/**
 * A tuple must not contain two members of the same hierarchy: crossjoining
 * [Time].[Quarter] with [Time].[Month] fails, whether the set is used
 * directly, via Item(0), or written as an explicit tuple.
 */
public void testCrossjoinDupHierarchyFails() {
assertQueryThrows(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " CrossJoin({[Time].[Quarter].[Q1]}, {[Time].[Month].[5]}) ON ROWS\n"
+ "from [Sales]",
"Tuple contains more than one member of hierarchy '[Time]'.");
// now with Item, for kicks
assertQueryThrows(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " CrossJoin({[Time].[Quarter].[Q1]}, {[Time].[Month].[5]}).Item(0) ON ROWS\n"
+ "from [Sales]",
"Tuple contains more than one member of hierarchy '[Time]'.");
// same query using explicit tuple
assertQueryThrows(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " ([Time].[Quarter].[Q1], [Time].[Month].[5]) ON ROWS\n"
+ "from [Sales]",
"Tuple contains more than one member of hierarchy '[Time]'.");
}
/**
 * Tests cases of different hierarchies in the same dimension.
 * (Compare to {@link #testCrossjoinDupHierarchyFails()}). Not an error.
 */
public void testCrossjoinDupDimensionOk() {
// All three formulations (CrossJoin, CrossJoin().Item(0), explicit
// tuple) yield the same single row, because [Time] and [Time].[Weekly]
// are distinct hierarchies of the same dimension.
final String expectedResult =
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q1], [Time].[Weekly].[1997].[10]}\n"
+ "Row #0: 4,395\n";
// hierarchyName adapts the [Time].[Weekly] reference to the naming
// style of the schema under test.
final String timeWeekly = TestContext.hierarchyName("Time", "Weekly");
assertQueryReturns(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " CrossJoin({[Time].[Quarter].[Q1]}, {"
+ timeWeekly + ".[1997].[10]}) ON ROWS\n"
+ "from [Sales]",
expectedResult);
// now with Item, for kicks
assertQueryReturns(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " CrossJoin({[Time].[Quarter].[Q1]}, {"
+ timeWeekly + ".[1997].[10]}).Item(0) ON ROWS\n"
+ "from [Sales]",
expectedResult);
// same query using explicit tuple
assertQueryReturns(
"select [Measures].[Unit Sales] ON COLUMNS,\n"
+ " ([Time].[Quarter].[Q1], "
+ timeWeekly + ".[1997].[10]) ON ROWS\n"
+ "from [Sales]",
expectedResult);
}
/**
 * Descendants(member) with no level argument returns the member itself
 * followed by its children.
 */
public void testDescendantsM() {
    final String axisExpr = "Descendants([Time].[1997].[Q1])";
    final String expected =
        "[Time].[1997].[Q1]\n"
        + "[Time].[1997].[Q1].[1]\n"
        + "[Time].[1997].[Q1].[2]\n"
        + "[Time].[1997].[Q1].[3]";
    assertAxisReturns(axisExpr, expected);
}
/**
 * Descendants of the current [Time] member depends on (exactly) the
 * [Time] hierarchy.
 */
public void testDescendantsDepends() {
    final String setExpr = "Descendants([Time].[Time].CurrentMember)";
    getTestContext().assertSetExprDependsOn(setExpr, "{[Time]}");
}
/**
 * Descendants(member, level) returns all members at the given level.
 */
public void testDescendantsML() {
    final String axisExpr = "Descendants([Time].[1997], [Time].[Month])";
    assertAxisReturns(axisExpr, months);
}
/**
 * Descendants with the SELF flag returns the members at the target level
 * only.
 */
public void testDescendantsMLSelf() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], SELF)";
    assertAxisReturns(axisExpr, quarters);
}
/**
 * Descendants with the LEAVES flag: at or above the member's own level the
 * result is empty unless the level contains leaf members; at the [Month]
 * level (the leaf level of [Time]) all months are returned.
 */
public void testDescendantsMLLeaves() {
assertAxisReturns(
"Descendants([Time].[1997], [Time].[Year], LEAVES)",
"");
assertAxisReturns(
"Descendants([Time].[1997], [Time].[Quarter], LEAVES)",
"");
assertAxisReturns(
"Descendants([Time].[1997], [Time].[Month], LEAVES)",
months);
// the flag keyword is case-insensitive ('leaves')
assertAxisReturns(
"Descendants([Gender], [Gender].[Gender], leaves)",
"[Gender].[F]\n" + "[Gender].[M]");
}
/**
 * Descendants(..., LEAVES) on the ragged [Sales Ragged] cube, where some
 * branches of the hierarchy are shorter than others.
 */
public void testDescendantsMLLeavesRagged() {
// no cities are at leaf level
final TestContext raggedContext =
getTestContext().withCube("[Sales Ragged]");
raggedContext.assertAxisReturns(
"Descendants([Store].[Israel], [Store].[Store City], leaves)",
"");
// all cities are leaves
raggedContext.assertAxisReturns(
"Descendants([Geography].[Israel], [Geography].[City], leaves)",
"[Geography].[Israel].[Israel].[Haifa]\n"
+ "[Geography].[Israel].[Israel].[Tel Aviv]");
// No state is a leaf (not even Israel, which is both a country and a
// a state, or Vatican, with is a country/state/city)
raggedContext.assertAxisReturns(
"Descendants([Geography], [Geography].[State], leaves)",
"");
// The Vatican is a nation with no children (they're all celibate,
// you know).
raggedContext.assertAxisReturns(
"Descendants([Geography], [Geography].[Country], leaves)",
"[Geography].[Vatican]");
}
/**
 * Descendants(member, depth, LEAVES): a leaf member is returned regardless
 * of the depth argument; a negative depth returns all leaf descendants.
 */
public void testDescendantsMNLeaves() {
// leaves at depth 0 returns the member itself
assertAxisReturns(
"Descendants([Time].[1997].[Q2].[4], 0, Leaves)",
"[Time].[1997].[Q2].[4]");
// leaves at depth > 0 returns the member itself
assertAxisReturns(
"Descendants([Time].[1997].[Q2].[4], 100, Leaves)",
"[Time].[1997].[Q2].[4]");
// leaves at depth < 0 returns all descendants
assertAxisReturns(
"Descendants([Time].[1997].[Q2], -1, Leaves)",
"[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]");
// leaves at depth 0 returns the member itself
assertAxisReturns(
"Descendants([Time].[1997].[Q2], 0, Leaves)",
"[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]");
// a depth beyond the deepest level still yields the leaves
assertAxisReturns(
"Descendants([Time].[1997].[Q2], 3, Leaves)",
"[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]");
}
/**
 * SELF_AND_BEFORE returns the target level's members plus every ancestor
 * level down from the starting member.
 */
public void testDescendantsMLSelfBefore() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], SELF_AND_BEFORE)";
    final String expected = year1997 + "\n" + quarters;
    assertAxisReturns(axisExpr, expected);
}
/**
 * SELF_BEFORE_AFTER returns every descendant level in hierarchized order.
 */
public void testDescendantsMLSelfBeforeAfter() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], SELF_BEFORE_AFTER)";
    assertAxisReturns(axisExpr, hierarchized1997);
}
/**
 * BEFORE returns only the levels above the target level (here, the year).
 */
public void testDescendantsMLBefore() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], BEFORE)";
    assertAxisReturns(axisExpr, year1997);
}
/**
 * BEFORE_AND_AFTER returns the levels above and below the target level,
 * skipping the target level itself.
 */
public void testDescendantsMLBeforeAfter() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], BEFORE_AND_AFTER)";
    final String expected = year1997 + "\n" + months;
    assertAxisReturns(axisExpr, expected);
}
/**
 * AFTER returns only the levels below the target level (here, the months).
 */
public void testDescendantsMLAfter() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Quarter], AFTER)";
    assertAxisReturns(axisExpr, months);
}
/**
 * AFTER at the lowest level ([Month]) has nothing below it: empty set.
 */
public void testDescendantsMLAfterEnd() {
    final String axisExpr =
        "Descendants([Time].[1997], [Time].[Month], AFTER)";
    assertAxisReturns(axisExpr, "");
}
/**
 * Depth 0 returns the starting member itself.
 */
public void testDescendantsM0() {
    final String axisExpr = "Descendants([Time].[1997], 0)";
    assertAxisReturns(axisExpr, year1997);
}
/**
 * Depth 2 below the year returns the months.
 */
public void testDescendantsM2() {
    final String axisExpr = "Descendants([Time].[1997], 2)";
    assertAxisReturns(axisExpr, months);
}
/**
 * Depth 2 with an explicit Self flag is equivalent to the default.
 */
public void testDescendantsM2Self() {
    final String axisExpr = "Descendants([Time].[1997], 2, Self)";
    assertAxisReturns(axisExpr, months);
}
/**
 * Depth 2 with the Leaves flag also yields the months, since [Month] is
 * the leaf level of [Time].
 */
public void testDescendantsM2Leaves() {
    final String axisExpr = "Descendants([Time].[1997], 2, Leaves)";
    assertAxisReturns(axisExpr, months);
}
/**
 * With Leaves, a depth far beyond the deepest level still returns the
 * leaf members (the months).
 */
public void testDescendantsMFarLeaves() {
    final String axisExpr = "Descendants([Time].[1997], 10000, Leaves)";
    assertAxisReturns(axisExpr, months);
}
/**
 * The depth argument may be omitted entirely (empty between the commas)
 * when the flag is Leaves.
 */
public void testDescendantsMEmptyLeaves() {
    final String axisExpr = "Descendants([Time].[1997], , Leaves)";
    assertAxisReturns(axisExpr, months);
}
/**
 * A trailing empty argument with no flag does not match any overload of
 * Descendants.
 */
public void testDescendantsMEmptyLeavesFail() {
    final String axisExpr = "Descendants([Time].[1997],)";
    assertAxisThrows(
        axisExpr,
        "No function matches signature 'Descendants(<Member>, <Empty>)");
}
/**
 * Omitting the depth is only legal when the flag is LEAVES; with AFTER it
 * is an error.
 */
public void testDescendantsMEmptyLeavesFail2() {
    final String axisExpr = "Descendants([Time].[1997], , AFTER)";
    assertAxisThrows(
        axisExpr,
        "depth must be specified unless DESC_FLAG is LEAVES");
}
/**
 * With Self, a depth beyond the deepest level matches nothing.
 */
public void testDescendantsMFarSelf() {
    final String axisExpr = "Descendants([Time].[1997], 10000, Self)";
    assertAxisReturns(axisExpr, "");
}
/**
 * Numeric depth with BEFORE_AND_AFTER returns the levels above and below
 * depth 1, skipping the quarters.
 */
public void testDescendantsMNY() {
    final String axisExpr =
        "Descendants([Time].[1997], 1, BEFORE_AND_AFTER)";
    final String expected = year1997 + "\n" + months;
    assertAxisReturns(axisExpr, expected);
}
/**
 * Descendants works on a non-default hierarchy ([Time.Weekly]): week 10 of
 * 1997 expands to its [Day] members.
 */
public void testDescendants2ndHier() {
assertAxisReturns(
"Descendants([Time.Weekly].[1997].[10], [Time.Weekly].[Day])",
"[Time].[Weekly].[1997].[10].[1]\n"
+ "[Time].[Weekly].[1997].[10].[23]\n"
+ "[Time].[Weekly].[1997].[10].[24]\n"
+ "[Time].[Weekly].[1997].[10].[25]\n"
+ "[Time].[Weekly].[1997].[10].[26]\n"
+ "[Time].[Weekly].[1997].[10].[27]\n"
+ "[Time].[Weekly].[1997].[10].[28]");
}
/**
 * Descendants at depth 2 of a parent-child hierarchy ([Employees] in the
 * HR cube) returns Sheri Nowmer's direct reports.
 */
public void testDescendantsParentChild() {
getTestContext().withCube("HR").assertAxisReturns(
"Descendants([Employees], 2)",
"[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence]\n"
+ "[Employees].[Sheri Nowmer].[Maya Gutierrez]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra]\n"
+ "[Employees].[Sheri Nowmer].[Rebecca Kanagaki]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz]\n"
+ "[Employees].[Sheri Nowmer].[Donna Arnold]");
}
/**
 * BEFORE at depth 2 of the [Employees] parent-child hierarchy returns the
 * two generations above the target depth.
 */
public void testDescendantsParentChildBefore() {
    final String axisExpr = "Descendants([Employees], 2, BEFORE)";
    final String expected =
        "[Employees].[All Employees]\n"
        + "[Employees].[Sheri Nowmer]";
    getTestContext().withCube("HR").assertAxisReturns(axisExpr, expected);
}
/**
 * Descendants(..., LEAVES) on the [Employees] parent-child hierarchy of
 * the HR cube: leaf employees are found whether restricted by level or by
 * depth, the starting member's leaves appear even at depth 0, and a
 * negative depth behaves like +infinity.
 */
public void testDescendantsParentChildLeaves() {
final TestContext testContext = getTestContext().withCube("HR");
// skipped on LucidDB, where this test is known to be slow
if (Bug.avoidSlowTestOnLucidDB(testContext.getDialect())) {
return;
}
// leaves, restricted by level
testContext.assertAxisReturns(
"Descendants([Employees].[All Employees].[Sheri Nowmer].[Michael Spence], [Employees].[Employee Id], LEAVES)",
"[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[John Brooks]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Todd Logan]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Joshua Several]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[James Thomas]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Robert Vessa]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Bronson Jacobs]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Rebecca Barley]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Emilio Alvaro]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Becky Waters]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[A. Joyce Jarvis]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Ruby Sue Styles]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Lisa Roy]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Ingrid Burkhardt]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Todd Whitney]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Barbara Wisnewski]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Karren Burkhardt]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[John Long]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Edwin Olenzek]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Jessie Valerio]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Robert Ahlering]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Megan Burke]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Karel Bates]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[James Tran]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Shelley Crow]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Anne Sims]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Clarence Tatman]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Jan Nelsen]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Jeanie Glenn]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Peggy Smith]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Tish Duff]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Anita Lucero]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stephen Burton]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Amy Consentino]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stacie Mcanich]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Mary Browning]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Alexandra Wellington]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Cory Bacugalupi]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stacy Rizzi]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Mike White]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Marty Simpson]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Robert Jones]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Raul Casts]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Bridget Browqett]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Kay Kartz]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Jeanette Cole]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Phyllis Huntsman]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Hannah Arakawa]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Wathalee Steuber]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Pamela Cox]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Helen Lutes]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Linda Ecoffey]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Katherine Swint]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Dianne Slattengren]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Ronald Heymsfield]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Steven Whitehead]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[William Sotelo]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Beth Stanley]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Jill Markwood]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Mildred Valentine]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Suzann Reams]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Audrey Wold]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Susan French]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Trish Pederson]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Eric Renn]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Elizabeth Catalano]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Eric Coleman]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Catherine Abel]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Emilo Miller]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Hazel Walker]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Linda Blasingame]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Jackie Blackwell]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[John Ortiz]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Stacey Tearpak]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Fannye Weber]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Diane Kabbes]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Brenda Heaney]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Judith Karavites]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Jauna Elson]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Nancy Hirota]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Marie Moya]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Nicky Chesnut]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Karen Hall]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Greg Narberes]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Anna Townsend]\n"
+ "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Carol Ann Rockne]");
// leaves, restricted by depth
testContext.assertAxisReturns(
"Descendants([Employees], 1, LEAVES)", "");
testContext.assertAxisReturns(
"Descendants([Employees], 2, LEAVES)",
"[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]\n"
+ "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Juanita Sharp]\n"
+ "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Sandra Brunner]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Ernest Staton]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Rose Sims]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Lauretta De Carlo]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Mary Williams]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Terri Burke]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Audrey Osborn]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Brian Binai]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Concepcion Lozada]\n"
+ "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]\n"
+ "[Employees].[Sheri Nowmer].[Donna Arnold].[Doris Carter]");
// depth 3 yields the same leaves as depth 2
testContext.assertAxisReturns(
"Descendants([Employees], 3, LEAVES)",
"[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]\n"
+ "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Juanita Sharp]\n"
+ "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Sandra Brunner]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Ernest Staton]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Rose Sims]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Lauretta De Carlo]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Mary Williams]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Terri Burke]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Audrey Osborn]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Brian Binai]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz].[Concepcion Lozada]\n"
+ "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]\n"
+ "[Employees].[Sheri Nowmer].[Donna Arnold].[Doris Carter]");
// note that depth is RELATIVE to the starting member
testContext.assertAxisReturns(
"Descendants([Employees].[Sheri Nowmer].[Roberta Damstra], 1, LEAVES)",
"[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
+ "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]");
// Howard Bechard is a leaf member -- appears even at depth 0
testContext.assertAxisReturns(
"Descendants([Employees].[All Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard], 0, LEAVES)",
"[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]");
testContext.assertAxisReturns(
"Descendants([Employees].[All Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard], 1, LEAVES)",
"[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]");
testContext.assertExprReturns(
"Count(Descendants([Employees], 2, LEAVES))", "16");
testContext.assertExprReturns(
"Count(Descendants([Employees], 3, LEAVES))", "16");
testContext.assertExprReturns(
"Count(Descendants([Employees], 4, LEAVES))", "63");
testContext.assertExprReturns(
"Count(Descendants([Employees], 999, LEAVES))", "1,044");
// Negative depth acts like +infinity (per MSAS). Run the test several
// times because we had a non-deterministic bug here.
for (int i = 0; i < 100; ++i) {
testContext.assertExprReturns(
"Count(Descendants([Employees], -1, LEAVES))", "1,044");
}
}
/**
 * Numeric depth with SELF_BEFORE_AFTER returns the entire subtree in
 * hierarchized order.
 */
public void testDescendantsSBA() {
    final String axisExpr =
        "Descendants([Time].[1997], 1, SELF_BEFORE_AFTER)";
    assertAxisReturns(axisExpr, hierarchized1997);
}
/**
 * Descendants accepts a set as its first argument: the children of each
 * member of the set are returned in the set's order, and the LEAVES form
 * works as well.
 */
public void testDescendantsSet() {
assertAxisReturns(
"Descendants({[Time].[1997].[Q4], [Time].[1997].[Q2]}, 1)",
"[Time].[1997].[Q4].[10]\n"
+ "[Time].[1997].[Q4].[11]\n"
+ "[Time].[1997].[Q4].[12]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]");
assertAxisReturns(
"Descendants({[Time].[1997]}, [Time].[Month], LEAVES)",
months);
}
/**
 * Descendants of the literal empty set '{}' is a type error, whereas an
 * expression that merely evaluates to an empty set yields an empty result.
 */
public void testDescendantsSetEmpty() {
    assertAxisThrows(
        "Descendants({}, 1)",
        "Cannot deduce type of set");
    final String emptyFilter =
        "Descendants(Filter({[Time].[Time].Members}, 1=0), 1)";
    assertAxisReturns(emptyFilter, "");
}
/**
 * The range (':') operator returns the members between its endpoints at
 * their common level, without their parents; a range may also be used to
 * define a named set without extra braces.
 */
public void testRange() {
assertAxisReturns(
"[Time].[1997].[Q1].[2] : [Time].[1997].[Q2].[5]",
"[Time].[1997].[Q1].[2]\n"
+ "[Time].[1997].[Q1].[3]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]"); // not parents
// testcase for bug XXXXX: braces required
assertQueryReturns(
"with set [Set1] as '[Product].[Drink]:[Product].[Food]' \n"
+ "\n"
+ "select [Set1] on columns, {[Measures].defaultMember} on rows \n"
+ "\n"
+ "from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Product].[Drink]}\n"
+ "{[Product].[Food]}\n"
+ "Axis #2:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Row #0: 24,597\n"
+ "Row #0: 191,940\n");
}
/**
 * A NULL endpoint in the range (':') operator yields the empty set rather
 * than an error.
 */
public void testNullRange() {
    final String rangeWithNullEnd = "[Time].[1997].[Q1].[2] : NULL";
    // expected: empty set
    assertAxisReturns(rangeWithNullEnd, "");
}
/**
 * A range whose endpoints are both NULL cannot even be parsed: the query
 * fails with a parse error.
 */
public void testTwoNullRange() {
    final String rangeExpr = "NULL : NULL";
    assertAxisThrows(
        rangeExpr,
        "Mondrian Error:Failed to parse query 'select {NULL : NULL} on columns from Sales'");
}
/**
 * Large dimensions use a different member reader, therefore need to
 * be tested separately.
 */
public void testRangeLarge() {
// a range across state boundaries within [Customers] (a large dimension)
assertAxisReturns(
"[Customers].[USA].[CA].[San Francisco] : [Customers].[USA].[WA].[Bellingham]",
"[Customers].[USA].[CA].[San Francisco]\n"
+ "[Customers].[USA].[CA].[San Gabriel]\n"
+ "[Customers].[USA].[CA].[San Jose]\n"
+ "[Customers].[USA].[CA].[Santa Cruz]\n"
+ "[Customers].[USA].[CA].[Santa Monica]\n"
+ "[Customers].[USA].[CA].[Spring Valley]\n"
+ "[Customers].[USA].[CA].[Torrance]\n"
+ "[Customers].[USA].[CA].[West Covina]\n"
+ "[Customers].[USA].[CA].[Woodland Hills]\n"
+ "[Customers].[USA].[OR].[Albany]\n"
+ "[Customers].[USA].[OR].[Beaverton]\n"
+ "[Customers].[USA].[OR].[Corvallis]\n"
+ "[Customers].[USA].[OR].[Lake Oswego]\n"
+ "[Customers].[USA].[OR].[Lebanon]\n"
+ "[Customers].[USA].[OR].[Milwaukie]\n"
+ "[Customers].[USA].[OR].[Oregon City]\n"
+ "[Customers].[USA].[OR].[Portland]\n"
+ "[Customers].[USA].[OR].[Salem]\n"
+ "[Customers].[USA].[OR].[W. Linn]\n"
+ "[Customers].[USA].[OR].[Woodburn]\n"
+ "[Customers].[USA].[WA].[Anacortes]\n"
+ "[Customers].[USA].[WA].[Ballard]\n"
+ "[Customers].[USA].[WA].[Bellingham]");
}
    /** A degenerate range whose endpoints coincide returns just that member. */
    public void testRangeStartEqualsEnd() {
        assertAxisReturns(
            "[Time].[1997].[Q3].[7] : [Time].[1997].[Q3].[7]",
            "[Time].[1997].[Q3].[7]");
    }
    /** As {@link #testRangeStartEqualsEnd}, on a large dimension. */
    public void testRangeStartEqualsEndLarge() {
        assertAxisReturns(
            "[Customers].[USA].[CA] : [Customers].[USA].[CA]",
            "[Customers].[USA].[CA]");
    }
    /** A range whose end precedes its start behaves as if reversed. */
    public void testRangeEndBeforeStart() {
        assertAxisReturns(
            "[Time].[1997].[Q3].[7] : [Time].[1997].[Q2].[5]",
            "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]"); // same as if reversed
    }
    /** As {@link #testRangeEndBeforeStart}, on a large dimension. */
    public void testRangeEndBeforeStartLarge() {
        assertAxisReturns(
            "[Customers].[USA].[WA] : [Customers].[USA].[CA]",
            "[Customers].[USA].[CA]\n"
            + "[Customers].[USA].[OR]\n"
            + "[Customers].[USA].[WA]");
    }
    /** Range endpoints on different levels of the same hierarchy are an error. */
    public void testRangeBetweenDifferentLevelsIsError() {
        assertAxisThrows(
            "[Time].[1997].[Q2] : [Time].[1997].[Q2].[5]",
            "Members must belong to the same level");
    }
    /** A dimension expression as endpoint is coerced to its default member. */
    public void testRangeBoundedByAll() {
        assertAxisReturns(
            "[Gender] : [Gender]",
            "[Gender].[All Gender]");
    }
    /** As {@link #testRangeBoundedByAll}, on a large dimension. */
    public void testRangeBoundedByAllLarge() {
        assertAxisReturns(
            "[Customers].DefaultMember : [Customers]",
            "[Customers].[All Customers]");
    }
    /** A range with a null endpoint (NextMember past the last member) is empty. */
    public void testRangeBoundedByNull() {
        assertAxisReturns(
            "[Gender].[F] : [Gender].[M].NextMember",
            "");
    }
    /** As {@link #testRangeBoundedByNull}, on a large dimension. */
    public void testRangeBoundedByNullLarge() {
        assertAxisReturns(
            "[Customers].PrevMember : [Customers].[USA].[OR]",
            "");
    }
    /** A bare level expression cannot be used where a set is required. */
    public void testSetContainingLevelFails() {
        assertAxisThrows(
            "[Store].[Store City]",
            "No function matches signature '{<Level>}'");
    }
    /**
     * Testcase for bug 715177: Sum over Except of an immutable member list
     * must not fail; the set operators need a mutable copy.
     */
    public void testBug715177() {
        assertQueryReturns(
            "WITH MEMBER [Product].[Non-Consumable].[Other] AS\n"
            + " 'Sum(Except( [Product].[Product Department].Members,\n"
            + "       TopCount([Product].[Product Department].Members, 3)),\n"
            + "       Measures.[Unit Sales])'\n"
            + "SELECT\n"
            + "  { [Measures].[Unit Sales] } ON COLUMNS,\n"
            + "  { TopCount([Product].[Product Department].Members,3),\n"
            + "              [Product].[Non-Consumable].[Other] } ON ROWS\n"
            + "FROM [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Non-Consumable].[Other]}\n"
            + "Row #0: 6,838\n"
            + "Row #1: 13,573\n"
            + "Row #2: 4,186\n"
            + "Row #3: 242,176\n");
    }
    /** Testcase for bug 714707 (same root cause as 715177). */
    public void testBug714707() {
        // Same issue as bug 715177 -- "children" returns immutable
        // list, which set operator must make mutable.
        assertAxisReturns(
            "{[Store].[USA].[CA].children, [Store].[USA]}",
            "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA]");
    }
    /** Testcase for bug 715177: Order over TopCount of children must work. */
    public void testBug715177c() {
        assertAxisReturns(
            "Order(TopCount({[Store].[USA].[CA].children},"
            + " [Measures].[Unit Sales], 2), [Measures].[Unit Sales])",
            "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[Los Angeles]");
    }
    /** Format() with a constant format string. */
    public void testFormatFixed() {
        assertExprReturns(
            "Format(12.2, \"#,##0.00\")",
            "12.20");
    }
    /** Format() with a format string computed at run time ("||" concat). */
    public void testFormatVariable() {
        assertExprReturns(
            "Format(1234.5, \"#,#\" || \"#0.00\")",
            "1,234.50");
    }
    /** Format() applied to a member formats the member's cell value. */
    public void testFormatMember() {
        assertExprReturns(
            "Format([Store].[USA].[CA], \"#,#\" || \"#0.00\")",
            "74,748.00");
    }
    /** Basic IIf() returning a string. */
    public void testIIf() {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, \"Yes\",\"No\")",
            "Yes");
    }
    /** IIf() mixing a null branch with a numeric branch; null renders as "". */
    public void testIIfWithNullAndNumber() {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, null,20)",
            "");
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, 20,null)",
            "20");
    }
    /** IIf() mixing a null branch with a string branch; null renders as "". */
    public void testIIfWithStringAndNull()
    {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, null,\"foo\")",
            "");
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, \"foo\",null)",
            "foo");
    }
    /** IsEmpty(null) is true, for both string and numeric IIf results. */
    public void testIsEmptyWithNull()
    {
        assertExprReturns(
            "iif (isempty(null), \"is empty\", \"not is empty\")",
            "is empty");
        assertExprReturns("iif (isempty(null), 1, 2)", "1");
    }
    /** IIf() whose branches are members. */
    public void testIIfMember() {
        assertAxisReturns(
            "IIf(1 > 2,[Store].[USA],[Store].[Canada].[BC])",
            "[Store].[Canada].[BC]");
    }
    /** IIf() whose branches are levels. */
    public void testIIfLevel() {
        assertExprReturns(
            "IIf(1 > 2, [Store].[Store Country],[Store].[Store City]).Name",
            "Store City");
    }
    /** IIf() whose branches are hierarchies/dimensions, including coercion. */
    public void testIIfHierarchy() {
        assertExprReturns(
            "IIf(1 > 2, [Time], [Store]).Name",
            "Store");
        // Call Iif(<Logical>, <Dimension>, <Hierarchy>). Argument #3, the
        // hierarchy [Time.Weekly] is implicitly converted to
        // the dimension [Time] to match argument #2 which is a dimension.
        assertExprReturns(
            "IIf(1 > 2, [Time], [Time.Weekly]).Name",
            "Time");
    }
    /** IIf() whose branches are dimensions. */
    public void testIIfDimension() {
        assertExprReturns(
            "IIf(1 > 2, [Store], [Time]).Name",
            "Time");
    }
    /** IIf() whose branches are sets. */
    public void testIIfSet() {
        assertAxisReturns(
            "IIf(1 > 2, {[Store].[USA], [Store].[USA].[CA]}, {[Store].[Mexico], [Store].[USA].[OR]})",
            "[Store].[Mexico]\n"
            + "[Store].[USA].[OR]");
    }
    /** Tests {@code <Member>.Dimension.Caption}. */
    public void testDimensionCaption() {
        assertExprReturns("[Time].[1997].Dimension.Caption", "Time");
    }
    /** Tests {@code <Member>.Hierarchy.Caption}. */
    public void testHierarchyCaption() {
        assertExprReturns("[Time].[1997].Hierarchy.Caption", "Time");
    }
    /** Tests {@code <Member>.Level.Caption}. */
    public void testLevelCaption() {
        assertExprReturns("[Time].[1997].Level.Caption", "Year");
    }
    /** Tests {@code <Member>.Caption}. */
    public void testMemberCaption() {
        assertExprReturns("[Time].[1997].Caption", "1997");
    }
    /** Tests {@code <Member>.Dimension.Name}. */
    public void testDimensionName() {
        assertExprReturns("[Time].[1997].Dimension.Name", "Time");
    }
    /** Tests {@code <Member>.Hierarchy.Name}. */
    public void testHierarchyName() {
        assertExprReturns("[Time].[1997].Hierarchy.Name", "Time");
    }
    /** Tests {@code <Member>.Level.Name}. */
    public void testLevelName() {
        assertExprReturns("[Time].[1997].Level.Name", "Year");
    }
    /** Tests {@code .Name} on members, dimensions and the null member. */
    public void testMemberName() {
        assertExprReturns("[Time].[1997].Name", "1997");
        // dimension name
        assertExprReturns("[Store].Name", "Store");
        // member name
        assertExprReturns("[Store].DefaultMember.Name", "All Stores");
        if (isDefaultNullMemberRepresentation()) {
            // name of null member
            assertExprReturns("[Store].Parent.Name", "#null");
        }
    }
    /** Tests {@code <Dimension>.UniqueName}. */
    public void testDimensionUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Dimension.UniqueName",
            "[Gender]");
    }
    /** Tests {@code <Hierarchy>.UniqueName}. */
    public void testHierarchyUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Hierarchy.UniqueName",
            "[Gender]");
    }
    /** Tests {@code <Level>.UniqueName}. */
    public void testLevelUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Level.UniqueName",
            "[Gender].[(All)]");
    }
    /** Tests {@code <Member>.UniqueName}. */
    public void testMemberUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.UniqueName",
            "[Gender].[All Gender]");
    }
    /** UniqueName of the null member; MSOLAP would give "" here. */
    public void testMemberUniqueNameOfNull() {
        if (isDefaultNullMemberRepresentation()) {
            assertExprReturns(
                "[Measures].[Unit Sales].FirstChild.UniqueName",
                "[Measures].[#null]"); // MSOLAP gives "" here
        }
    }
    /** Checks which hierarchies a CoalesceEmpty expression depends on. */
    public void testCoalesceEmptyDepends() {
        getTestContext().assertExprDependsOn(
            "coalesceempty([Time].[1997], [Gender].[M])",
            TestContext.allHiers());
        String s1 = TestContext.allHiersExcept("[Measures]", "[Time]");
        getTestContext().assertExprDependsOn(
            "coalesceempty(([Measures].[Unit Sales], [Time].[1997]),"
            + " ([Measures].[Store Sales], [Time].[1997].[Q2]))",
            s1);
    }
    /**
     * Tests CoalesceEmpty with two, and then many, tuple arguments;
     * verifies the numeric results against expected grids.
     */
    public void testCoalesceEmpty() {
        // [DF] is all null and [WA] has numbers for 1997 but not for 1998.
        Result result = executeQuery(
            "with\n"
            + "    member Measures.[Coal1] as 'coalesceempty(([Time].[1997], Measures.[Store Sales]), ([Time].[1998], Measures.[Store Sales]))'\n"
            + "    member Measures.[Coal2] as 'coalesceempty(([Time].[1997], Measures.[Unit Sales]), ([Time].[1998], Measures.[Unit Sales]))'\n"
            + "select \n"
            + "    {Measures.[Coal1], Measures.[Coal2]} on columns,\n"
            + "    {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
            + "from \n"
            + "    [Sales]");
        checkDataResults(
            new Double[][]{
                new Double[]{null, null},
                new Double[]{new Double(263793.22), new Double(124366)}
            },
            result,
            0.001);
        // Two arguments: falls back to [Sales Per Customer] when [DF] is empty.
        result = executeQuery(
            "with\n"
            + "    member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
            + "    member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        Measures.[Sales Per Customer])'\n"
            + "select \n"
            + "    {Measures.[Sales Per Customer], Measures.[Coal]} on columns,\n"
            + "    {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
            + "from \n"
            + "    [Sales]\n"
            + "where\n"
            + "    ([Time].[1997].[Q2])");
        checkDataResults(
            new Double[][]{
                new Double[]{null, null},
                new Double[]{new Double(8.963), new Double(8.963)}
            },
            result,
            0.001);
        // Many arguments: repeated empty tuples before the fallback.
        result = executeQuery(
            "with\n"
            + "    member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
            + "    member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        Measures.[Sales Per Customer])'\n"
            + "select \n"
            + "    {Measures.[Sales Per Customer], Measures.[Coal]} on columns,\n"
            + "    {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
            + "from \n"
            + "    [Sales]\n"
            + "where\n"
            + "    ([Time].[1997].[Q2])");
        checkDataResults(
            new Double[][]{
                new Double[]{null, null},
                new Double[]{new Double(8.963), new Double(8.963)}
            },
            result,
            0.001);
    }
    /** CoalesceEmpty must evaluate its arguments in the correct member context. */
    public void testBrokenContextBug() {
        Result result = executeQuery(
            "with\n"
            + "    member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
            + "    member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
            + "        Measures.[Sales Per Customer])'\n"
            + "select \n"
            + "    {Measures.[Coal]} on columns,\n"
            + "    {[Store].[All Stores].[USA].[WA]} on rows\n"
            + "from \n"
            + "    [Sales]\n"
            + "where\n"
            + "    ([Time].[1997].[Q2])");
        checkDataResults(new Double[][]{{new Double(8.963)}}, result, 0.001);
    }
    /**
     * Tests the function <code>&lt;Set&gt;.Item(&lt;Integer&gt;)</code>,
     * including out-of-bounds indexes (which return the null member).
     */
    public void testSetItemInt() {
        assertAxisReturns(
            "{[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(0)",
            "[Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]");
        assertAxisReturns(
            "{[Customers].[All Customers].[USA],"
            + "[Customers].[All Customers].[USA].[WA],"
            + "[Customers].[All Customers].[USA].[CA],"
            + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(2)",
            "[Customers].[USA].[CA]");
        assertAxisReturns(
            "{[Customers].[All Customers].[USA],"
            + "[Customers].[All Customers].[USA].[WA],"
            + "[Customers].[All Customers].[USA].[CA],"
            + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(100 / 50 - 1)",
            "[Customers].[USA].[WA]");
        assertAxisReturns(
            "{([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA]),"
            + "([Time].[1997].[Q1].[2], [Customers].[All Customers].[USA].[WA]),"
            + "([Time].[1997].[Q1].[3], [Customers].[All Customers].[USA].[CA]),"
            + "([Time].[1997].[Q2].[4], [Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian])}"
            + ".Item(100 / 50 - 1)",
            "{[Time].[1997].[Q1].[2], [Customers].[USA].[WA]}");
        // given index out of bounds, item returns null
        assertAxisReturns(
            "{[Customers].[All Customers].[USA],"
            + "[Customers].[All Customers].[USA].[WA],"
            + "[Customers].[All Customers].[USA].[CA],"
            + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(-1)",
            "");
        // given index out of bounds, item returns null
        assertAxisReturns(
            "{[Customers].[All Customers].[USA],"
            + "[Customers].[All Customers].[USA].[WA],"
            + "[Customers].[All Customers].[USA].[CA],"
            + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(4)",
            "");
    }
    /**
     * Tests the function <code>&lt;Set&gt;.Item(&lt;String&gt; [,...])</code>:
     * member lookup by name(s), no-match cases, and wrong arity.
     */
    public void testSetItemString() {
        assertAxisReturns(
            "{[Gender].[M], [Gender].[F]}.Item(\"M\")",
            "[Gender].[M]");
        assertAxisReturns(
            "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"S\")",
            "{[Gender].[M], [Marital Status].[S]}");
        // MSAS fails with "duplicate dimensions across (independent) axes".
        // (That's a bug in MSAS.)
        assertAxisReturns(
            "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"M\")",
            "{[Gender].[M], [Marital Status].[M]}");
        // None found.
        assertAxisReturns(
            "{[Gender].[M], [Gender].[F]}.Item(\"X\")", "");
        assertAxisReturns(
            "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"F\")",
            "");
        assertAxisReturns(
            "CrossJoin([Gender].Members, [Marital Status].Members).Item(\"S\", \"M\")",
            "");
        assertAxisThrows(
            "CrossJoin([Gender].Members, [Marital Status].Members).Item(\"M\")",
            "Argument count does not match set's cardinality 2");
    }
    /**
     * Tests the tuple constructor, and that its compiled form uses
     * MemberArrayValueCalc rather than building an intermediate tuple.
     */
    public void testTuple() {
        assertExprReturns(
            "([Gender].[M], "
            + "[Time].[Time].Children.Item(2), "
            + "[Measures].[Unit Sales])",
            "33,249");
        // Calc calls MemberValue with 3 args -- more efficient than
        // constructing a tuple.
        assertExprCompilesTo(
            "([Gender].[M], [Time].[Time].Children.Item(2), [Measures].[Unit Sales])",
            "MemberArrayValueCalc(name=MemberArrayValueCalc, class=class mondrian.calc.impl.MemberArrayValueCalc, type=SCALAR, resultStyle=VALUE)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n"
            + "    Item(name=Item, class=class mondrian.olap.fun.SetItemFunDef$5, type=MemberType<hierarchy=[Time]>, resultStyle=VALUE)\n"
            + "        Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Time]>>, resultStyle=LIST)\n"
            + "            CurrentMemberFixed(hierarchy=[Time], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Time]>, resultStyle=VALUE)\n"
            + "        Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=DecimalType(0), resultStyle=VALUE_NOT_NULL, value=2)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n");
    }
    /**
     * Tests whether the tuple operator can be applied to arguments of various
     * types. See bug 1491699
     * "ClassCastException in mondrian.calc.impl.GenericCalc.evaluat".
     */
    public void testTupleArgTypes() {
        // can coerce dimensions (if they have a unique hierarchy) and
        // hierarchies to members
        assertExprReturns(
            "([Gender], [Time].[Time])",
            "266,773");
        // can coerce hierarchy to member
        assertExprReturns(
            "([Gender].[M], " + TimeWeekly + ")", "135,215");
        // cannot coerce level to member
        assertAxisThrows(
            "{([Gender].[M], [Store].[Store City])}",
            "No function matches signature '(<Member>, <Level>)'");
        // coerce args (hierarchy, member, member, dimension)
        assertAxisReturns(
            "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Promotion Media])}",
            "{[Time].[Weekly].[All Weeklys], [Measures].[Store Sales], [Marital Status].[M], [Promotion Media].[All Media]}");
        // usage of different hierarchies in the [Time] dimension
        assertAxisReturns(
            "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time].[Time])}",
            "{[Time].[Weekly].[All Weeklys], [Measures].[Store Sales], [Marital Status].[M], [Time].[1997]}");
        // two usages of the [Time].[Weekly] hierarchy
        if (MondrianProperties.instance().SsasCompatibleNaming.get()) {
            assertAxisThrows(
                "{([Time].[Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time].[Weekly])}",
                "Tuple contains more than one member of hierarchy '[Time].[Weekly]'.");
        } else {
            assertAxisThrows(
                "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time.Weekly])}",
                "Tuple contains more than one member of hierarchy '[Time.Weekly]'.");
        }
        // cannot coerce integer to member
        assertAxisThrows(
            "{([Gender].[M], 123)}",
            "No function matches signature '(<Member>, <Numeric Expression>)'");
    }
    /** Tests {@code <Tuple>.Item(<Integer>)}, including out-of-bounds cases. */
    public void testTupleItem() {
        assertAxisReturns(
            "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(2)",
            "[Gender].[M]");
        assertAxisReturns(
            "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(1)",
            "[Customers].[USA].[OR]");
        assertAxisReturns(
            "{[Time].[1997].[Q1].[1]}.item(0)",
            "[Time].[1997].[Q1].[1]");
        assertAxisReturns(
            "{[Time].[1997].[Q1].[1]}.Item(0).Item(0)",
            "[Time].[1997].[Q1].[1]");
        // given out of bounds index, item returns null
        assertAxisReturns(
            "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(-1)",
            "");
        // given out of bounds index, item returns null
        assertAxisReturns(
            "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(500)",
            "");
        // empty set
        assertExprReturns(
            "Filter([Gender].members, 1 = 0).Item(0)",
            "");
        // empty set of unknown type
        assertExprReturns(
            "{}.Item(3)",
            "");
        // past end of set
        assertExprReturns(
            "{[Gender].members}.Item(4)",
            "");
        // negative index
        assertExprReturns(
            "{[Gender].members}.Item(-50)",
            "");
    }
    /** Testcase for bug 1735821: tuple containing Dimensions(0).defaultMember. */
    public void testTupleAppliedToUnknownHierarchy() {
        // manifestation of bug 1735821
        assertQueryReturns(
            "with \n"
            + "member [Product].[Test] as '([Product].[Food],Dimensions(0).defaultMember)' \n"
            + "select \n"
            + "{[Product].[Test], [Product].[Food]} on columns, \n"
            + "{[Measures].[Store Sales]} on rows \n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[Test]}\n"
            + "{[Product].[Food]}\n"
            + "Axis #2:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Row #0: 191,940.00\n"
            + "Row #0: 409,035.59\n");
    }
    /** Checks the hierarchies a tuple expression depends on. */
    public void testTupleDepends()
    {
        getTestContext().assertMemberExprDependsOn(
            "([Store].[USA], [Gender].[F])", "{}");
        getTestContext().assertMemberExprDependsOn(
            "([Store].[USA], [Gender])", "{[Gender]}");
        // in a scalar context, the expression depends on everything except
        // the explicitly stated dimensions
        getTestContext().assertExprDependsOn(
            "([Store].[USA], [Gender])",
            TestContext.allHiersExcept("[Store]"));
        // The result should be all dims except [Gender], but there's a small
        // bug in MemberValueCalc.dependsOn where we escalate 'might depend' to
        // 'depends' and we return that it depends on all dimensions.
        getTestContext().assertExprDependsOn(
            "(Dimensions('Store').CurrentMember, [Gender].[F])",
            TestContext.allHiers());
    }
    /** Tests Item() applied to an empty set; Mondrian yields the null member. */
    public void testItemNull()
    {
        // In the following queries, MSAS returns 'Formula error - object type
        // is not valid - in an <object> base class. An error occurred during
        // attempt to get cell value'. This is because in MSAS, Item is a COM
        // function, and COM doesn't like null pointers.
        //
        // Mondrian represents null members as actual objects, so its behavior
        // is different.
        // MSAS returns error here.
        assertExprReturns(
            "Filter([Gender].members, 1 = 0).Item(0).Dimension.Name",
            "Gender");
        // MSAS returns error here.
        assertExprReturns(
            "Filter([Gender].members, 1 = 0).Item(0).Parent",
            "");
        assertExprReturns(
            "(Filter([Store].members, 0 = 0).Item(0).Item(0),"
            + "Filter([Store].members, 0 = 0).Item(0).Item(0))",
            "266,773");
        if (isDefaultNullMemberRepresentation()) {
            // MSAS returns error here.
            assertExprReturns(
                "Filter([Gender].members, 1 = 0).Item(0).Name",
                "#null");
        }
    }
    /** Tuples containing a null member evaluate to null and are filtered out of sets. */
    public void testTupleNull() {
        // if a tuple contains any null members, it evaluates to null
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,\n"
            + " { ([Gender].[M], [Store]),\n"
            + "   ([Gender].[F], [Store].parent),\n"
            + "   ([Gender].parent, [Store])} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[M], [Store].[All Stores]}\n"
            + "Row #0: 135,215\n");
        // the set function eliminates tuples which are wholly or partially
        // null
        assertAxisReturns(
            "([Gender].parent, [Marital Status]),\n" // part null
            + " ([Gender].[M], [Marital Status].parent),\n" // part null
            + " ([Gender].parent, [Marital Status].parent),\n" // wholly null
            + " ([Gender].[M], [Marital Status])", // not null
            "{[Gender].[M], [Marital Status].[All Marital Status]}");
        if (isDefaultNullMemberRepresentation()) {
            // The tuple constructor returns a null tuple if one of its
            // arguments is null -- and the Item function returns null if the
            // tuple is null.
            assertExprReturns(
                "([Gender].parent, [Marital Status]).Item(0).Name",
                "#null");
            assertExprReturns(
                "([Gender].parent, [Marital Status]).Item(1).Name",
                "#null");
        }
    }
private void checkDataResults(
Double[][] expected,
Result result,
final double tolerance)
{
int[] coords = new int[2];
for (int row = 0; row < expected.length; row++) {
coords[1] = row;
for (int col = 0; col < expected[0].length; col++) {
coords[0] = col;
Cell cell = result.getCell(coords);
final Double expectedValue = expected[row][col];
if (expectedValue == null) {
assertTrue("Expected null value", cell.isNull());
} else if (cell.isNull()) {
fail(
"Cell at (" + row + ", " + col
+ ") was null, but was expecting "
+ expectedValue);
} else {
assertEquals(
"Incorrect value returned at ("
+ row + ", " + col + ")",
expectedValue,
((Number) cell.getValue()).doubleValue(),
tolerance);
}
}
}
}
    /** Tests {@code <Level>.<Member>} name resolution. */
    public void testLevelMemberExpressions() {
        // Should return Beverly Hills in California.
        assertAxisReturns(
            "[Store].[Store City].[Beverly Hills]",
            "[Store].[USA].[CA].[Beverly Hills]");
        // There are two months named "1" in the time dimension: one
        // for 1997 and one for 1998. <Level>.<Member> should return
        // the first one.
        assertAxisReturns("[Time].[Month].[1]", "[Time].[1997].[Q1].[1]");
        // Shouldn't be able to find a member named "Q1" on the month level.
        assertAxisThrows(
            "[Time].[Month].[Q1]",
            "MDX object '[Time].[Month].[Q1]' not found in cube");
    }
    /** Searched CASE: first true WHEN wins. */
    public void testCaseTestMatch() {
        assertExprReturns(
            "CASE WHEN 1=0 THEN \"first\" WHEN 1=1 THEN \"second\" WHEN 1=2 THEN \"third\" ELSE \"fourth\" END",
            "second");
    }
    /** Searched CASE: ELSE branch taken when no WHEN matches. */
    public void testCaseTestMatchElse() {
        assertExprReturns(
            "CASE WHEN 1=0 THEN \"first\" ELSE \"fourth\" END",
            "fourth");
    }
    /** Searched CASE: no match and no ELSE yields null (rendered ""). */
    public void testCaseTestMatchNoElse() {
        assertExprReturns(
            "CASE WHEN 1=0 THEN \"first\" END",
            "");
    }
    /**
     * Testcase for bug 1799391, "Case Test function throws class cast
     * exception": CASE may return a member.
     */
    public void testCaseTestReturnsMemberBug1799391() {
        assertQueryReturns(
            "WITH\n"
            + " MEMBER [Product].[CaseTest] AS\n"
            + " 'CASE\n"
            + " WHEN [Gender].CurrentMember IS [Gender].[M] THEN [Gender].[F]\n"
            + " ELSE [Gender].[F]\n"
            + " END'\n"
            + "                \n"
            + "SELECT {[Product].[CaseTest]} ON 0, {[Gender].[M]} ON 1 FROM Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[CaseTest]}\n"
            + "Axis #2:\n"
            + "{[Gender].[M]}\n"
            + "Row #0: 131,558\n");
        assertAxisReturns(
            "CASE WHEN 1+1 = 2 THEN [Gender].[F] ELSE [Gender].[F].Parent END",
            "[Gender].[F]");
        // try case match for good measure
        assertAxisReturns(
            "CASE 1 WHEN 2 THEN [Gender].[F] ELSE [Gender].[F].Parent END",
            "[Gender].[All Gender]");
    }
    /** Simple (matched) CASE: matching WHEN value wins. */
    public void testCaseMatch() {
        assertExprReturns(
            "CASE 2 WHEN 1 THEN \"first\" WHEN 2 THEN \"second\" WHEN 3 THEN \"third\" ELSE \"fourth\" END",
            "second");
    }
    /** Simple (matched) CASE: ELSE branch taken when no value matches. */
    public void testCaseMatchElse() {
        assertExprReturns(
            "CASE 7 WHEN 1 THEN \"first\" ELSE \"fourth\" END",
            "fourth");
    }
    /** Simple (matched) CASE: no match and no ELSE yields null (rendered ""). */
    public void testCaseMatchNoElse() {
        assertExprReturns(
            "CASE 8 WHEN 0 THEN \"first\" END",
            "");
    }
    /** CASE with inconsistently-typed branches or conditions is rejected. */
    public void testCaseTypeMismatch() {
        // type mismatch between case and else
        assertAxisThrows(
            "CASE 1 WHEN 1 THEN 2 ELSE \"foo\" END",
            "No function matches signature");
        // type mismatch between case and case
        assertAxisThrows(
            "CASE 1 WHEN 1 THEN 2 WHEN 2 THEN \"foo\" ELSE 3 END",
            "No function matches signature");
        // type mismatch between value and case
        assertAxisThrows(
            "CASE 1 WHEN \"foo\" THEN 2 ELSE 3 END",
            "No function matches signature");
        // non-boolean condition
        assertAxisThrows(
            "CASE WHEN 1 = 2 THEN 3 WHEN 4 THEN 5 ELSE 6 END",
            "No function matches signature");
    }
/**
* Testcase for
* <a href="http://jira.pentaho.com/browse/MONDRIAN-853">
* bug MONDRIAN-853, "When using CASE WHEN in a CalculatedMember values are
* not returned the way expected"</a>.
*/
public void testCaseTuple() {
// The case in the bug, simplified. With the bug, returns a member array
// "[Lmondrian.olap.Member;@151b0a5". Type deduction should realize
// that the result is a scalar, therefore a tuple (represented by a
// member array) needs to be evaluated to a scalar. I think that if we
// get the type deduction right, the MDX exp compiler will handle the
// rest.
if (false)
assertExprReturns(
"case 1 when 0 then 1.5\n"
+ " else ([Gender].[M], [Measures].[Unit Sales]) end",
"135,215");
// "case when" variant always worked
assertExprReturns(
"case when 1=0 then 1.5\n"
+ " else ([Gender].[M], [Measures].[Unit Sales]) end",
"135,215");
// case 2: cannot deduce type (tuple x) vs. (tuple y). Should be able
// to deduce that the result type is tuple-type<member-type<Gender>,
// member-type<Measures>>.
if (false)
assertExprReturns(
"case when 1=0 then ([Gender].[M], [Measures].[Store Sales])\n"
+ " else ([Gender].[M], [Measures].[Unit Sales]) end",
"xxx");
// case 3: mixture of member & tuple. Should be able to deduce that
// result type is an expression.
if (false)
assertExprReturns(
"case when 1=0 then ([Measures].[Store Sales])\n"
+ " else ([Gender].[M], [Measures].[Unit Sales]) end",
"xxx");
}
    /** Tests {@code <Member>.Properties(<String>)}. */
    public void testPropertiesExpr() {
        assertExprReturns(
            "[Store].[USA].[CA].[Beverly Hills].[Store 6].Properties(\"Store Type\")",
            "Gourmet Supermarket");
    }
    /**
     * Test case for bug
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-1227">MONDRIAN-1227,
     * "Properties function does not implicitly convert dimension to member; has
     * documentation typos"</a>.
     */
    public void testPropertiesOnDimension() {
        // [Store] is a dimension. When called with a property like FirstChild,
        // it is implicitly converted to a member.
        assertAxisReturns("[Store].FirstChild", "[Store].[Canada]");
        // The same should happen with the <Member>.Properties(<String>)
        // function; now the bug is fixed, it does. Dimension is implicitly
        // converted to member.
        assertExprReturns(
            "[Store].Properties('MEMBER_UNIQUE_NAME')",
            "[Store].[All Stores]");
        // Hierarchy is implicitly converted to member.
        assertExprReturns(
            "[Store].[USA].Hierarchy.Properties('MEMBER_UNIQUE_NAME')",
            "[Store].[All Stores]");
    }
    /**
     * Tests that requesting a non-existent member property throws an error.
     */
    public void testPropertiesNonExistent() {
        assertExprThrows(
            "[Store].[USA].[CA].[Beverly Hills].[Store 6].Properties(\"Foo\")",
            "Property 'Foo' is not valid for");
    }
    /** Filters members by a property value; expects 8 supermarket stores. */
    public void testPropertiesFilter() {
        Result result = executeQuery(
            "SELECT { [Store Sales] } ON COLUMNS,\n"
            + " TOPCOUNT(Filter( [Store].[Store Name].Members,\n"
            + "                   [Store].CurrentMember.Properties(\"Store Type\") = \"Supermarket\"),\n"
            + "           10, [Store Sales]) ON ROWS\n"
            + "FROM [Sales]");
        Assert.assertEquals(8, result.getAxes()[1].getPositions().size());
    }
public void testPropertyInCalculatedMember() {
Result result = executeQuery(
"WITH MEMBER [Measures].[Store Sales per Sqft]\n"
+ "AS '[Measures].[Store Sales] / "
+ " [Store].CurrentMember.Properties(\"Store Sqft\")'\n"
+ "SELECT \n"
+ " {[Measures].[Unit Sales], [Measures].[Store Sales per Sqft]} ON COLUMNS,\n"
+ " {[Store].[Store Name].members} ON ROWS\n"
+ "FROM Sales");
Member member;
Cell cell;
member = result.getAxes()[1].getPositions().get(18).get(0);
Assert.assertEquals(
"[Store].[USA].[WA].[Bellingham].[Store 2]",
member.getUniqueName());
cell = result.getCell(new int[]{0, 18});
Assert.assertEquals("2,237", cell.getFormattedValue());
cell = result.getCell(new int[]{1, 18});
Assert.assertEquals(".17", cell.getFormattedValue());
member = result.getAxes()[1].getPositions().get(3).get(0);
Assert.assertEquals(
"[Store].[Mexico].[DF].[San Andres].[Store 21]",
member.getUniqueName());
cell = result.getCell(new int[]{0, 3});
Assert.assertEquals("", cell.getFormattedValue());
cell = result.getCell(new int[]{1, 3});
Assert.assertEquals("", cell.getFormattedValue());
}
    /** Tests OpeningPeriod() with 0, 1 and 2 args, including error cases. */
    public void testOpeningPeriod() {
        assertAxisReturns(
            "OpeningPeriod([Time].[Month], [Time].[1997].[Q3])",
            "[Time].[1997].[Q3].[7]");
        assertAxisReturns(
            "OpeningPeriod([Time].[Quarter], [Time].[1997])",
            "[Time].[1997].[Q1]");
        assertAxisReturns(
            "OpeningPeriod([Time].[Year], [Time].[1997])", "[Time].[1997]");
        assertAxisReturns(
            "OpeningPeriod([Time].[Month], [Time].[1997])",
            "[Time].[1997].[Q1].[1]");
        assertAxisReturns(
            "OpeningPeriod([Product].[Product Name], [Product].[All Products].[Drink])",
            "[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]");
        getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
            "OpeningPeriod([Store].[Store City], [Store].[All Stores].[Israel])",
            "[Store].[Israel].[Israel].[Haifa]");
        getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
            "OpeningPeriod([Store].[Store State], [Store].[All Stores].[Israel])",
            "");
        // Default member is [Time].[1997].
        assertAxisReturns(
            "OpeningPeriod([Time].[Month])", "[Time].[1997].[Q1].[1]");
        assertAxisReturns("OpeningPeriod()", "[Time].[1997].[Q1]");
        TestContext testContext = getTestContext().withCube("[Sales Ragged]");
        testContext.assertAxisThrows(
            "OpeningPeriod([Time].[Year], [Store].[All Stores].[Israel])",
            "The <level> and <member> arguments to OpeningPeriod must be "
            + "from the same hierarchy. The level was from '[Time]' but "
            + "the member was from '[Store]'.");
        assertAxisThrows(
            "OpeningPeriod([Store].[Store City])",
            "The <level> and <member> arguments to OpeningPeriod must be "
            + "from the same hierarchy. The level was from '[Store]' but "
            + "the member was from '[Time]'.");
    }
    /**
     * Tests that OpeningPeriod with an explicit NULL member argument is
     * rejected by the parser.
     */
    public void testOpeningPeriodNull() {
        assertAxisThrows(
            "OpeningPeriod([Time].[Month], NULL)",
            "Mondrian Error:Failed to parse query 'select {OpeningPeriod([Time].[Month], NULL)} on columns from Sales'");
    }
    /**
     * Tests LastPeriods(): positive counts look backwards, negative counts
     * forwards; counts beyond the hierarchy are clipped. Exercised at year,
     * quarter and month levels, and on a non-time hierarchy.
     */
    public void testLastPeriods() {
        // Year level.
        assertAxisReturns(
            "LastPeriods(0, [Time].[1998])", "");
        assertAxisReturns(
            "LastPeriods(1, [Time].[1998])", "[Time].[1998]");
        assertAxisReturns(
            "LastPeriods(-1, [Time].[1998])", "[Time].[1998]");
        assertAxisReturns(
            "LastPeriods(2, [Time].[1998])",
            "[Time].[1997]\n" + "[Time].[1998]");
        assertAxisReturns(
            "LastPeriods(-2, [Time].[1997])",
            "[Time].[1997]\n" + "[Time].[1998]");
        assertAxisReturns(
            "LastPeriods(5000, [Time].[1998])",
            "[Time].[1997]\n" + "[Time].[1998]");
        assertAxisReturns(
            "LastPeriods(-5000, [Time].[1997])",
            "[Time].[1997]\n" + "[Time].[1998]");
        // Quarter level.
        assertAxisReturns(
            "LastPeriods(2, [Time].[1998].[Q2])",
            "[Time].[1998].[Q1]\n" + "[Time].[1998].[Q2]");
        assertAxisReturns(
            "LastPeriods(4, [Time].[1998].[Q2])",
            "[Time].[1997].[Q3]\n"
            + "[Time].[1997].[Q4]\n"
            + "[Time].[1998].[Q1]\n"
            + "[Time].[1998].[Q2]");
        assertAxisReturns(
            "LastPeriods(-2, [Time].[1997].[Q2])",
            "[Time].[1997].[Q2]\n" + "[Time].[1997].[Q3]");
        assertAxisReturns(
            "LastPeriods(-4, [Time].[1997].[Q2])",
            "[Time].[1997].[Q2]\n"
            + "[Time].[1997].[Q3]\n"
            + "[Time].[1997].[Q4]\n"
            + "[Time].[1998].[Q1]");
        assertAxisReturns(
            "LastPeriods(5000, [Time].[1998].[Q2])",
            "[Time].[1997].[Q1]\n"
            + "[Time].[1997].[Q2]\n"
            + "[Time].[1997].[Q3]\n"
            + "[Time].[1997].[Q4]\n"
            + "[Time].[1998].[Q1]\n"
            + "[Time].[1998].[Q2]");
        assertAxisReturns(
            "LastPeriods(-5000, [Time].[1998].[Q2])",
            "[Time].[1998].[Q2]\n"
            + "[Time].[1998].[Q3]\n"
            + "[Time].[1998].[Q4]");
        // Month level.
        assertAxisReturns(
            "LastPeriods(2, [Time].[1998].[Q2].[5])",
            "[Time].[1998].[Q2].[4]\n" + "[Time].[1998].[Q2].[5]");
        assertAxisReturns(
            "LastPeriods(12, [Time].[1998].[Q2].[5])",
            "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]\n"
            + "[Time].[1997].[Q3].[8]\n"
            + "[Time].[1997].[Q3].[9]\n"
            + "[Time].[1997].[Q4].[10]\n"
            + "[Time].[1997].[Q4].[11]\n"
            + "[Time].[1997].[Q4].[12]\n"
            + "[Time].[1998].[Q1].[1]\n"
            + "[Time].[1998].[Q1].[2]\n"
            + "[Time].[1998].[Q1].[3]\n"
            + "[Time].[1998].[Q2].[4]\n"
            + "[Time].[1998].[Q2].[5]");
        assertAxisReturns(
            "LastPeriods(-2, [Time].[1998].[Q2].[4])",
            "[Time].[1998].[Q2].[4]\n" + "[Time].[1998].[Q2].[5]");
        assertAxisReturns(
            "LastPeriods(-12, [Time].[1997].[Q2].[6])",
            "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]\n"
            + "[Time].[1997].[Q3].[8]\n"
            + "[Time].[1997].[Q3].[9]\n"
            + "[Time].[1997].[Q4].[10]\n"
            + "[Time].[1997].[Q4].[11]\n"
            + "[Time].[1997].[Q4].[12]\n"
            + "[Time].[1998].[Q1].[1]\n"
            + "[Time].[1998].[Q1].[2]\n"
            + "[Time].[1998].[Q1].[3]\n"
            + "[Time].[1998].[Q2].[4]\n"
            + "[Time].[1998].[Q2].[5]");
        // Non-time hierarchy, the all member, and a null member.
        assertAxisReturns(
            "LastPeriods(2, [Gender].[M])",
            "[Gender].[F]\n" + "[Gender].[M]");
        assertAxisReturns(
            "LastPeriods(-2, [Gender].[F])",
            "[Gender].[F]\n" + "[Gender].[M]");
        assertAxisReturns(
            "LastPeriods(2, [Gender])", "[Gender].[All Gender]");
        assertAxisReturns(
            "LastPeriods(2, [Gender].Parent)", "");
    }
public void testParallelPeriod() {
assertAxisReturns(
"parallelperiod([Time].[Quarter], 1, [Time].[1998].[Q1])",
"[Time].[1997].[Q4]");
assertAxisReturns(
"parallelperiod([Time].[Quarter], -1, [Time].[1997].[Q1])",
"[Time].[1997].[Q2]");
assertAxisReturns(
"parallelperiod([Time].[Year], 1, [Time].[1998].[Q1])",
"[Time].[1997].[Q1]");
assertAxisReturns(
"parallelperiod([Time].[Year], 1, [Time].[1998].[Q1].[1])",
"[Time].[1997].[Q1].[1]");
// No args, therefore finds parallel period to [Time].[1997], which
// would be [Time].[1996], except that that doesn't exist, so null.
assertAxisReturns("ParallelPeriod()", "");
// Parallel period to [Time].[1997], which would be [Time].[1996],
// except that that doesn't exist, so null.
assertAxisReturns(
"ParallelPeriod([Time].[Year], 1, [Time].[1997])", "");
// one parameter, level 2 above member
if (isDefaultNullMemberRepresentation()) {
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS \n"
+ " ' ParallelPeriod([Time].[Year]).UniqueName '\n"
+ "SELECT {[Measures].[Foo]} ON COLUMNS\n"
+ "FROM [Sales]\n"
+ "WHERE [Time].[1997].[Q3].[8]",
"Axis #0:\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: [Time].[#null]\n");
}
// one parameter, level 1 above member
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS \n"
+ " ' ParallelPeriod([Time].[Quarter]).UniqueName '\n"
+ "SELECT {[Measures].[Foo]} ON COLUMNS\n"
+ "FROM [Sales]\n"
+ "WHERE [Time].[1997].[Q3].[8]",
"Axis #0:\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: [Time].[1997].[Q2].[5]\n");
// one parameter, level same as member
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS \n"
+ " ' ParallelPeriod([Time].[Month]).UniqueName '\n"
+ "SELECT {[Measures].[Foo]} ON COLUMNS\n"
+ "FROM [Sales]\n"
+ "WHERE [Time].[1997].[Q3].[8]",
"Axis #0:\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: [Time].[1997].[Q3].[7]\n");
// one parameter, level below member
if (isDefaultNullMemberRepresentation()) {
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS \n"
+ " ' ParallelPeriod([Time].[Month]).UniqueName '\n"
+ "SELECT {[Measures].[Foo]} ON COLUMNS\n"
+ "FROM [Sales]\n"
+ "WHERE [Time].[1997].[Q3]",
"Axis #0:\n"
+ "{[Time].[1997].[Q3]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: [Time].[#null]\n");
}
}
public void _testParallelPeriodThrowsException() {
assertQueryThrows(
"select {parallelperiod([Time].[Year], 1)} on columns "
+ "from [Sales] where ([Time].[1998].[Q1].[2])",
"This should say something about Time appearing on two different axes (slicer an columns)");
}
public void testParallelPeriodDepends() {
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod([Time].[Quarter], 2.0)", "{[Time]}");
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod([Time].[Quarter], 2.0, [Time].[1997].[Q3])", "{}");
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod()",
"{[Time]}");
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod([Product].[Food])", "{[Product]}");
// [Gender].[M] is used here as a numeric expression!
// The numeric expression DOES depend upon [Product].
// The expression as a whole depends upon everything except [Gender].
String s1 = TestContext.allHiersExcept("[Gender]");
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod([Product].[Product Family], [Gender].[M], [Product].[Food])",
s1);
// As above
String s11 = TestContext.allHiersExcept("[Gender]");
getTestContext().assertMemberExprDependsOn(
"ParallelPeriod([Product].[Product Family], [Gender].[M])", s11);
getTestContext().assertSetExprDependsOn(
"parallelperiod([Time].[Time].CurrentMember)",
"{[Time]}");
}
public void testParallelPeriodLevelLag() {
assertQueryReturns(
"with member [Measures].[Prev Unit Sales] as "
+ " '([Measures].[Unit Sales], parallelperiod([Time].[Quarter], 2))' "
+ "select "
+ " crossjoin({[Measures].[Unit Sales], [Measures].[Prev Unit Sales]}, {[Marital Status].[All Marital Status].children}) on columns, "
+ " {[Time].[1997].[Q3]} on rows "
+ "from "
+ " [Sales] ",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales], [Marital Status].[M]}\n"
+ "{[Measures].[Unit Sales], [Marital Status].[S]}\n"
+ "{[Measures].[Prev Unit Sales], [Marital Status].[M]}\n"
+ "{[Measures].[Prev Unit Sales], [Marital Status].[S]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q3]}\n"
+ "Row #0: 32,815\n"
+ "Row #0: 33,033\n"
+ "Row #0: 33,101\n"
+ "Row #0: 33,190\n");
}
public void testParallelPeriodLevel() {
assertQueryReturns(
"with "
+ " member [Measures].[Prev Unit Sales] as "
+ " '([Measures].[Unit Sales], parallelperiod([Time].[Quarter]))' "
+ "select "
+ " crossjoin({[Measures].[Unit Sales], [Measures].[Prev Unit Sales]}, {[Marital Status].[All Marital Status].[M]}) on columns, "
+ " {[Time].[1997].[Q3].[8]} on rows "
+ "from "
+ " [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales], [Marital Status].[M]}\n"
+ "{[Measures].[Prev Unit Sales], [Marital Status].[M]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "Row #0: 10,957\n"
+ "Row #0: 10,280\n");
}
    /**
     * Tests the binary "+" operator. Null-handling, as pinned by the expected
     * values: null acts as zero when the other operand is non-null, and
     * null + null is null (rendered as the empty string).
     */
    public void testPlus() {
        getTestContext().assertExprDependsOn("1 + 2", "{}");
        // A tuple expression depends on every hierarchy it does not pin down.
        String s1 = TestContext.allHiersExcept("[Measures]", "[Gender]");
        getTestContext().assertExprDependsOn(
            "([Measures].[Unit Sales], [Gender].[F]) + 2", s1);
        assertExprReturns("1+2", "3");
        assertExprReturns("5 + " + NullNumericExpr, "5"); // 5 + null --> 5
        assertExprReturns(NullNumericExpr + " + " + NullNumericExpr, ""); // null + null --> null
        assertExprReturns(NullNumericExpr + " + 0", "0"); // null + 0 --> 0
    }
    /**
     * Tests the binary "-" operator. Null acts as zero unless both operands
     * are null, in which case the result is null (empty string).
     */
    public void testMinus() {
        assertExprReturns("1-3", "-2");
        assertExprReturns("5 - " + NullNumericExpr, "5"); // 5 - null --> 5
        assertExprReturns(NullNumericExpr + " - - 2", "2"); // null - (-2) --> 2
        assertExprReturns(NullNumericExpr + " - " + NullNumericExpr, ""); // null - null --> null
    }
public void testMinus_bug1234759()
{
assertQueryReturns(
"WITH MEMBER [Customers].[USAMinusMexico]\n"
+ "AS '([Customers].[All Customers].[USA] - [Customers].[All Customers].[Mexico])'\n"
+ "SELECT {[Measures].[Unit Sales]} ON COLUMNS,\n"
+ "{[Customers].[All Customers].[USA], [Customers].[All Customers].[Mexico],\n"
+ "[Customers].[USAMinusMexico]} ON ROWS\n"
+ "FROM [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[Mexico]}\n"
+ "{[Customers].[USAMinusMexico]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: \n"
+ "Row #2: 266,773\n"
// with bug 1234759, this was null
+ "");
}
    /** Verifies that "-" is left-associative: (11-7)-5 = -1. */
    public void testMinusAssociativity() {
        // right-associative would give 11-(7-5) = 9, which is wrong
        assertExprReturns("11-7-5", "-1");
    }
public void testMultiply() {
assertExprReturns("4*7", "28");
assertExprReturns("5 * " + NullNumericExpr, ""); // 5 * null --> null
assertExprReturns(NullNumericExpr + " * - 2", "");
assertExprReturns(NullNumericExpr + " - " + NullNumericExpr, "");
}
    /** Checks precedence and left-associativity of "*" and "/". */
    public void testMultiplyPrecedence() {
        assertExprReturns("3 + 4 * 5 + 6", "29"); // '*' binds tighter than '+'
        assertExprReturns("5 * 24 / 4 * 2", "60"); // left-to-right: ((5*24)/4)*2
        assertExprReturns("48 / 4 / 2", "6"); // left-to-right: (48/4)/2
    }
/**
* Bug 774807 caused expressions to be mistaken for the crossjoin
* operator.
*/
public void testMultiplyBug774807() {
final String desiredResult =
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[All Stores]}\n"
+ "Axis #2:\n"
+ "{[Measures].[Store Sales]}\n"
+ "{[Measures].[A]}\n"
+ "Row #0: 565,238.13\n"
+ "Row #1: 319,494,143,605.90\n";
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '([Measures].[Store Sales] * [Measures].[Store Sales])'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
// as above, no parentheses
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '[Measures].[Store Sales] * [Measures].[Store Sales]'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
// as above, plus 0
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '[Measures].[Store Sales] * [Measures].[Store Sales] + 0'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
}
    /**
     * Tests the "/" operator under both settings of the
     * NullDenominatorProducesNull property. A zero denominator always gives
     * NaN or +/-Infinity; only a NULL denominator is affected by the
     * property. The original value is restored in a finally block so later
     * tests are unaffected.
     */
    public void testDivide() {
        assertExprReturns("10 / 5", "2");
        assertExprReturns(NullNumericExpr + " / - 2", ""); // null numerator --> null
        assertExprReturns(NullNumericExpr + " / " + NullNumericExpr, "");
        boolean origNullDenominatorProducesNull =
            MondrianProperties.instance().NullDenominatorProducesNull.get();
        try {
            // default behavior: a null denominator divides like zero,
            // yielding +/-Infinity
            MondrianProperties.instance().NullDenominatorProducesNull.set(
                false);
            assertExprReturns("-2 / " + NullNumericExpr, "Infinity");
            assertExprReturns("0 / 0", "NaN");
            assertExprReturns("-3 / (2 - 2)", "-Infinity");
            assertExprReturns("NULL/1", "");
            assertExprReturns("NULL/NULL", "");
            assertExprReturns("1/NULL", "Infinity");
            // when NullDenominatorProducesNull is set to true, a null
            // denominator yields null; a zero denominator is unaffected
            MondrianProperties.instance().NullDenominatorProducesNull.set(true);
            assertExprReturns("-2 / " + NullNumericExpr, "");
            assertExprReturns("0 / 0", "NaN");
            assertExprReturns("-3 / (2 - 2)", "-Infinity");
            assertExprReturns("NULL/1", "");
            assertExprReturns("NULL/NULL", "");
            assertExprReturns("1/NULL", "");
        } finally {
            MondrianProperties.instance().NullDenominatorProducesNull.set(
                origNullDenominatorProducesNull);
        }
    }
public void testDividePrecedence() {
assertExprReturns("24 / 4 / 2 * 10 - -1", "31");
}
    /**
     * Tests the Mod function. Expected values follow Excel semantics:
     * MOD(n, d) = n - d * INT(n / d), so the sign of the result matches the
     * sign of the divisor, and a zero divisor raises a division error.
     */
    public void testMod() {
        // the following tests are consistent with excel xp
        assertExprReturns("mod(11, 3)", "2");
        assertExprReturns("mod(-12, 3)", "0");
        // can handle non-ints, using the formula MOD(n, d) = n - d * INT(n / d)
        assertExprReturns("mod(7.2, 3)", 1.2, 0.0001);
        assertExprReturns("mod(7.2, 3.2)", .8, 0.0001);
        assertExprReturns("mod(7.2, -3.2)", -2.4, 0.0001);
        // per Excel doc "sign of result is same as divisor"
        assertExprReturns("mod(3, 2)", "1");
        assertExprReturns("mod(-3, 2)", "1");
        assertExprReturns("mod(3, -2)", "-1");
        assertExprReturns("mod(-3, -2)", "-1");
        assertExprThrows(
            "mod(4, 0)",
            "java.lang.ArithmeticException: / by zero");
        assertExprThrows(
            "mod(0, 0)",
            "java.lang.ArithmeticException: / by zero");
    }
    /** Tests the unary "-" operator on a literal. */
    public void testUnaryMinus() {
        assertExprReturns("-3", "-3");
    }
    /** Unary "-" applied to a tuple negates the tuple's cell value. */
    public void testUnaryMinusMember() {
        assertExprReturns(
            "- ([Measures].[Unit Sales],[Gender].[F])",
            "-131,558");
    }
    /** Unary minus binds tighter than "*": 1 - ((-10.5) * 2) - 3 = 19. */
    public void testUnaryMinusPrecedence() {
        assertExprReturns("1 - -10.5 * 2 -3", "19");
    }
    /** Negative zero must format as "0", never "-0", however it arises. */
    public void testNegativeZero() {
        assertExprReturns("-0.0", "0");
    }
    public void testNegativeZero1() {
        // unary minus applied to a parenthesized zero
        assertExprReturns("-(0.0)", "0");
    }
    public void testNegativeZeroSubtract() {
        assertExprReturns("-0.0 - 0.0", "0");
    }
    public void testNegativeZeroMultiply() {
        assertExprReturns("-1 * 0", "0");
    }
    public void testNegativeZeroDivide() {
        assertExprReturns("-0.0 / 2", "0");
    }
public void testString() {
// The String(Integer,Char) function requires us to implicitly cast a
// string to a char.
assertQueryReturns(
"with member measures.x as 'String(3, \"yahoo\")'\n"
+ "select measures.x on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[x]}\n"
+ "Row #0: yyy\n");
// String is converted to char by taking first character
assertExprReturns("String(3, \"yahoo\")", "yyy"); // SSAS agrees
// Integer is converted to char by converting to string and taking first
// character
if (Bug.Ssas2005Compatible) {
// SSAS2005 can implicitly convert an integer (32) to a string, and
// then to a char by taking the first character. Mondrian requires
// an explicit cast.
assertExprReturns("String(3, 32)", "333");
assertExprReturns("String(8, -5)", "--------");
} else {
assertExprReturns("String(3, Cast(32 as string))", "333");
assertExprReturns("String(8, Cast(-5 as string))", "--------");
}
// Error if length<0
assertExprReturns("String(0, 'x')", ""); // SSAS agrees
assertExprThrows(
"String(-1, 'x')", "NegativeArraySizeException"); // SSAS agrees
assertExprThrows(
"String(-200, 'x')", "NegativeArraySizeException"); // SSAS agrees
}
public void testStringConcat() {
assertExprReturns(
" \"foo\" || \"bar\" ",
"foobar");
}
public void testStringConcat2() {
assertExprReturns(
" \"foo\" || [Gender].[M].Name || \"\" ",
"fooM");
}
    // Logical-operator tests. The associativity cases pin down that AND
    // binds tighter than both OR and XOR.
    public void testAnd() {
        assertBooleanExprReturns(" 1=1 AND 2=2 ", true);
    }
    public void testAnd2() {
        assertBooleanExprReturns(" 1=1 AND 2=0 ", false);
    }
    public void testOr() {
        assertBooleanExprReturns(" 1=0 OR 2=0 ", false);
    }
    public void testOr2() {
        assertBooleanExprReturns(" 1=0 OR 0=0 ", true);
    }
    public void testOrAssociativity1() {
        // Would give 'false' if OR were stronger than AND (wrong!)
        assertBooleanExprReturns(" 1=1 AND 1=0 OR 1=1 ", true);
    }
    public void testOrAssociativity2() {
        // Would give 'false' if OR were stronger than AND (wrong!)
        assertBooleanExprReturns(" 1=1 OR 1=0 AND 1=1 ", true);
    }
    public void testOrAssociativity3() {
        // parentheses override precedence
        assertBooleanExprReturns(" (1=0 OR 1=1) AND 1=1 ", true);
    }
    public void testXor() {
        // true XOR true --> false
        assertBooleanExprReturns(" 1=1 XOR 2=2 ", false);
    }
    public void testXorAssociativity() {
        // Would give 'false' if XOR were stronger than AND (wrong!)
        assertBooleanExprReturns(" 1 = 1 AND 1 = 1 XOR 1 = 0 ", true);
    }
public void testNonEmptyCrossJoin() {
// NonEmptyCrossJoin needs to evaluate measures to find out whether
// cells are empty, so it implicitly depends upon all dimensions.
String s1 = TestContext.allHiersExcept("[Store]");
getTestContext().assertSetExprDependsOn(
"NonEmptyCrossJoin([Store].[USA].Children, [Gender].Children)", s1);
assertAxisReturns(
"NonEmptyCrossJoin("
+ "[Customers].[All Customers].[USA].[CA].Children, "
+ "[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].Children)",
"{[Customers].[USA].[CA].[Bellflower], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Downey], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Glendale], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Glendale], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Grossmont], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Imperial Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[La Jolla], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Lincoln Acres], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Lincoln Acres], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Long Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Los Angeles], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Newport Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Pomona], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Pomona], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[San Gabriel], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[West Covina], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[West Covina], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Woodland Hills], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}");
// empty set
assertAxisReturns(
"NonEmptyCrossJoin({Gender.Parent}, {Store.Parent})", "");
assertAxisReturns(
"NonEmptyCrossJoin({Store.Parent}, Gender.Children)", "");
assertAxisReturns("NonEmptyCrossJoin(Store.Members, {})", "");
// same dimension twice
// todo: should throw
if (false) {
assertAxisThrows(
"NonEmptyCrossJoin({Store.[USA]}, {Store.[USA].[CA]})",
"xxx");
}
}
    public void testNot() {
        assertBooleanExprReturns(" NOT 1=1 ", false);
    }
    public void testNotNot() {
        // double negation
        assertBooleanExprReturns(" NOT NOT 1=1 ", true);
    }
    public void testNotAssociativity() {
        // NOT binds tighter than AND/OR: (1=1 AND NOT 1=1) OR (NOT 1=1 AND 1=1)
        assertBooleanExprReturns(" 1=1 AND NOT 1=1 OR NOT 1=1 AND 1=1 ", false);
    }
    /**
     * IS NULL is false for a real member and true for the null member, e.g.
     * the parent of the all member.
     */
    public void testIsNull() {
        assertBooleanExprReturns(" Store.[All Stores] IS NULL ", false);
        assertBooleanExprReturns(" Store.[All Stores].parent IS NULL ", true);
    }
public void testIsMember() {
assertBooleanExprReturns(
" Store.[USA].parent IS Store.[All Stores]", true);
assertBooleanExprReturns(
" [Store].[USA].[CA].parent IS [Store].[Mexico]", false);
}
public void testIsString() {
assertExprThrows(
" [Store].[USA].Name IS \"USA\" ",
"No function matches signature '<String> IS <String>'");
}
public void testIsNumeric() {
assertExprThrows(
" [Store].[USA].Level.Ordinal IS 25 ",
"No function matches signature '<Numeric Expression> IS <Numeric Expression>'");
}
public void testIsTuple() {
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA], Gender.[M])", true);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA])", true);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA]) "
+ "OR [Gender] IS NULL",
true);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA]) "
+ "AND [Gender] IS NULL",
false);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA], Gender.[F])",
false);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA])",
false);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS Store.[USA]",
false);
}
public void testIsLevel() {
assertBooleanExprReturns(
" Store.[USA].level IS Store.[Store Country] ", true);
assertBooleanExprReturns(
" Store.[USA].[CA].level IS Store.[Store Country] ", false);
}
public void testIsHierarchy() {
assertBooleanExprReturns(
" Store.[USA].hierarchy IS Store.[Mexico].hierarchy ", true);
assertBooleanExprReturns(
" Store.[USA].hierarchy IS Gender.[M].hierarchy ", false);
}
public void testIsDimension() {
assertBooleanExprReturns(" Store.[USA].dimension IS Store ", true);
assertBooleanExprReturns(" Gender.[M].dimension IS Store ", false);
}
    public void testStringEquals() {
        assertBooleanExprReturns(" \"foo\" = \"bar\" ", false);
    }
    public void testStringEqualsAssociativity() {
        // '||' binds tighter than '=': parses as "foo" = ("fo" || "o")
        assertBooleanExprReturns(" \"foo\" = \"fo\" || \"o\" ", true);
    }
    public void testStringEqualsEmpty() {
        assertBooleanExprReturns(" \"\" = \"\" ", true);
    }
    public void testEq() {
        // numeric '=' compares values, not types: 1.0 = 1
        assertBooleanExprReturns(" 1.0 = 1 ", true);
        assertBooleanExprReturns(
            "[Product].CurrentMember.Level.Ordinal = 2.0", false);
        checkNullOp("=");
    }
    public void testStringNe() {
        assertBooleanExprReturns(" \"foo\" <> \"bar\" ", true);
    }
    public void testNe() {
        assertBooleanExprReturns(" 2 <> 1.0 + 1.0 ", false);
        checkNullOp("<>");
    }
    public void testNeInfinity() {
        // Infinity compares equal to itself (unlike NaN), so '<>' is false
        assertBooleanExprReturns("(1 / 0) <> (1 / 0)", false);
    }
    public void testLt() {
        assertBooleanExprReturns(" 2 < 1.0 + 1.0 ", false);
        checkNullOp("<");
    }
    public void testLe() {
        assertBooleanExprReturns(" 2 <= 1.0 + 1.0 ", true);
        checkNullOp("<=");
    }
    public void testGt() {
        assertBooleanExprReturns(" 2 > 1.0 + 1.0 ", false);
        checkNullOp(">");
    }
public void testGe() {
assertBooleanExprReturns(" 2 > 1.0 + 1.0 ", false);
checkNullOp(">=");
}
private void checkNullOp(final String op) {
assertBooleanExprReturns(" 0 " + op + " " + NullNumericExpr, false);
assertBooleanExprReturns(NullNumericExpr + " " + op + " 0", false);
assertBooleanExprReturns(
NullNumericExpr + " " + op + " " + NullNumericExpr, false);
}
public void testDistinctTwoMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[Sheri Nowmer].[Donna Arnold]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]");
}
public void testDistinctThreeMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz]");
}
public void testDistinctFourMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz]");
}
public void testDistinctTwoTuples() {
getTestContext().assertAxisReturns(
"Distinct({([Time].[1997],[Store].[All Stores].[Mexico]), "
+ "([Time].[1997], [Store].[All Stores].[Mexico])})",
"{[Time].[1997], [Store].[Mexico]}");
}
public void testDistinctSomeTuples() {
getTestContext().assertAxisReturns(
"Distinct({([Time].[1997],[Store].[All Stores].[Mexico]), "
+ "crossjoin({[Time].[1997]},{[Store].[All Stores].children})})",
"{[Time].[1997], [Store].[Mexico]}\n"
+ "{[Time].[1997], [Store].[Canada]}\n"
+ "{[Time].[1997], [Store].[USA]}");
}
/**
* Make sure that slicer is in force when expression is applied
* on axis, E.g. select filter([Customers].members, [Unit Sales] > 100)
* from sales where ([Time].[1998])
*/
public void testFilterWithSlicer() {
Result result = executeQuery(
"select {[Measures].[Unit Sales]} on columns,\n"
+ " filter([Customers].[USA].children,\n"
+ " [Measures].[Unit Sales] > 20000) on rows\n"
+ "from Sales\n"
+ "where ([Time].[1997].[Q1])");
Axis rows = result.getAxes()[1];
// if slicer were ignored, there would be 3 rows
Assert.assertEquals(1, rows.getPositions().size());
Cell cell = result.getCell(new int[]{0, 0});
Assert.assertEquals("30,114", cell.getFormattedValue());
}
public void testFilterCompound() {
Result result = executeQuery(
"select {[Measures].[Unit Sales]} on columns,\n"
+ " Filter(\n"
+ " CrossJoin(\n"
+ " [Gender].Children,\n"
+ " [Customers].[USA].Children),\n"
+ " [Measures].[Unit Sales] > 9500) on rows\n"
+ "from Sales\n"
+ "where ([Time].[1997].[Q1])");
List<Position> rows = result.getAxes()[1].getPositions();
Assert.assertEquals(3, rows.size());
Assert.assertEquals("F", rows.get(0).get(0).getName());
Assert.assertEquals("WA", rows.get(0).get(1).getName());
Assert.assertEquals("M", rows.get(1).get(0).getName());
Assert.assertEquals("OR", rows.get(1).get(1).getName());
Assert.assertEquals("M", rows.get(2).get(0).getName());
Assert.assertEquals("WA", rows.get(2).get(1).getName());
}
public void testGenerateDepends() {
getTestContext().assertSetExprDependsOn(
"Generate([Product].CurrentMember.Children, Crossjoin({[Product].CurrentMember}, Crossjoin([Store].[Store State].Members, [Store Type].Members)), ALL)",
"{[Product]}");
getTestContext().assertSetExprDependsOn(
"Generate([Product].[All Products].Children, Crossjoin({[Product].CurrentMember}, Crossjoin([Store].[Store State].Members, [Store Type].Members)), ALL)",
"{}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Store].CurrentMember.Children})",
"{}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Gender].CurrentMember})",
"{[Gender]}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Gender].[M]})",
"{}");
}
public void testGenerate() {
assertAxisReturns(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Store].CurrentMember.Children})",
"[Store].[USA].[CA]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA].[WA]\n"
+ "[Store].[USA].[CA].[Alameda]\n"
+ "[Store].[USA].[CA].[Beverly Hills]\n"
+ "[Store].[USA].[CA].[Los Angeles]\n"
+ "[Store].[USA].[CA].[San Diego]\n"
+ "[Store].[USA].[CA].[San Francisco]");
}
public void testGenerateNonSet() {
// SSAS implicitly converts arg #2 to a set
assertAxisReturns(
"Generate({[Store].[USA], [Store].[USA].[CA]}, [Store].PrevMember, ALL)",
"[Store].[Mexico]\n"
+ "[Store].[Mexico].[Zacatecas]");
// SSAS implicitly converts arg #1 to a set
assertAxisReturns(
"Generate([Store].[USA], [Store].PrevMember, ALL)",
"[Store].[Mexico]");
}
public void testGenerateAll() {
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[OR].[Portland]},"
+ " Ascendants([Store].CurrentMember),"
+ " ALL)",
"[Store].[USA].[CA]\n"
+ "[Store].[USA]\n"
+ "[Store].[All Stores]\n"
+ "[Store].[USA].[OR].[Portland]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA]\n"
+ "[Store].[All Stores]");
}
public void testGenerateUnique() {
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[OR].[Portland]},"
+ " Ascendants([Store].CurrentMember))",
"[Store].[USA].[CA]\n"
+ "[Store].[USA]\n"
+ "[Store].[All Stores]\n"
+ "[Store].[USA].[OR].[Portland]\n"
+ "[Store].[USA].[OR]");
}
public void testGenerateUniqueTuple() {
assertAxisReturns(
"Generate({([Store].[USA].[CA],[Product].[All Products]), "
+ "([Store].[USA].[CA],[Product].[All Products])},"
+ "{([Store].CurrentMember, [Product].CurrentMember)})",
"{[Store].[USA].[CA], [Product].[All Products]}");
}
public void testGenerateCrossJoin() {
// Note that the different regions have different Top 2.
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[CA].[San Francisco]},\n"
+ " CrossJoin({[Store].CurrentMember},\n"
+ " TopCount([Product].[Brand Name].members, \n"
+ " 2,\n"
+ " [Measures].[Unit Sales])))",
"{[Store].[USA].[CA], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Hermanos]}\n"
+ "{[Store].[USA].[CA], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Tell Tale]}\n"
+ "{[Store].[USA].[CA].[San Francisco], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Ebony]}\n"
+ "{[Store].[USA].[CA].[San Francisco], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[High Top]}");
}
public void testGenerateString() {
assertExprReturns(
"Generate({Time.[1997], Time.[1998]},"
+ " Time.[Time].CurrentMember.Name)",
"19971998");
assertExprReturns(
"Generate({Time.[1997], Time.[1998]},"
+ " Time.[Time].CurrentMember.Name, \" and \")",
"1997 and 1998");
}
    /** Head(set, n) returns the first n members of the set. */
    public void testHead() {
        assertAxisReturns(
            "Head([Store].Children, 2)",
            "[Store].[Canada]\n"
            + "[Store].[Mexico]");
    }
    /** A negative count yields the empty set. */
    public void testHeadNegative() {
        assertAxisReturns(
            "Head([Store].Children, 2 - 3)",
            "");
    }
    /** When the count is omitted it defaults to 1. */
    public void testHeadDefault() {
        assertAxisReturns(
            "Head([Store].Children)",
            "[Store].[Canada]");
    }
    /** A count larger than the set returns the whole set. */
    public void testHeadOvershoot() {
        assertAxisReturns(
            "Head([Store].Children, 2 + 2)",
            "[Store].[Canada]\n"
            + "[Store].[Mexico]\n"
            + "[Store].[USA]");
    }
    /** Head of an empty set is empty, with or without an explicit count. */
    public void testHeadEmpty() {
        assertAxisReturns(
            "Head([Gender].[F].Children, 2)",
            "");
        assertAxisReturns(
            "Head([Gender].[F].Children)",
            "");
    }
/**
* Test case for bug 2488492, "Union between calc mem and head function
* throws exception"
*/
public void testHeadBug() {
assertQueryReturns(
"SELECT\n"
+ " UNION(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " HEAD(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " IIF(\n"
+ " COUNT(\n"
+ " FILTER(\n"
+ " DESCENDANTS(\n"
+ " [Customers].CURRENTMEMBER,\n"
+ " [Customers].[Country]),\n"
+ " [Measures].[Unit Sales] >= 66),\n"
+ " INCLUDEEMPTY)> 0,\n"
+ " 1,\n"
+ " 0)),\n"
+ " ALL)\n"
+ " ON AXIS(0)\n"
+ "FROM\n"
+ " [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[All Customers]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: 266,773\n");
assertQueryReturns(
"WITH\n"
+ " MEMBER\n"
+ " [Customers].[COG_OQP_INT_t2]AS '1',\n"
+ " SOLVE_ORDER = 65535\n"
+ "SELECT\n"
+ " UNION(\n"
+ " {([Customers].[COG_OQP_INT_t2])},\n"
+ " HEAD(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " IIF(\n"
+ " COUNT(\n"
+ " FILTER(\n"
+ " DESCENDANTS(\n"
+ " [Customers].CURRENTMEMBER,\n"
+ " [Customers].[Country]),\n"
+ " [Measures].[Unit Sales]>= 66),\n"
+ " INCLUDEEMPTY)> 0,\n"
+ " 1,\n"
+ " 0)),\n"
+ " ALL)\n"
+ " ON AXIS(0)\n"
+ "FROM\n"
+ " [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[COG_OQP_INT_t2]}\n"
+ "{[Customers].[All Customers]}\n"
+ "Row #0: 1\n"
+ "Row #0: 266,773\n");
// More minimal test case. Also demonstrates similar problem with Tail.
assertAxisReturns(
"Union(\n"
+ " Union(\n"
+ " Tail([Customers].[USA].[CA].Children, 2),\n"
+ " Head([Customers].[USA].[WA].Children, 2),\n"
+ " ALL),\n"
+ " Tail([Customers].[USA].[OR].Children, 2),"
+ " ALL)",
"[Customers].[USA].[CA].[West Covina]\n"
+ "[Customers].[USA].[CA].[Woodland Hills]\n"
+ "[Customers].[USA].[WA].[Anacortes]\n"
+ "[Customers].[USA].[WA].[Ballard]\n"
+ "[Customers].[USA].[OR].[W. Linn]\n"
+ "[Customers].[USA].[OR].[Woodburn]");
}
public void testHierarchize() {
assertAxisReturns(
"Hierarchize(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Drink],\n"
+ " [Product].[Non-Consumable],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]})",
"[Product].[All Products]\n"
+ "[Product].[Drink]\n"
+ "[Product].[Drink].[Dairy]\n"
+ "[Product].[Food]\n"
+ "[Product].[Food].[Eggs]\n"
+ "[Product].[Non-Consumable]");
}
public void testHierarchizePost() {
assertAxisReturns(
"Hierarchize(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " POST)",
"[Product].[Drink].[Dairy]\n"
+ "[Product].[Food].[Eggs]\n"
+ "[Product].[Food]\n"
+ "[Product].[All Products]");
}
public void testHierarchizePC() {
getTestContext().withCube("HR").assertAxisReturns(
"Hierarchize(\n"
+ " { Subset([Employees].Members, 90, 10),\n"
+ " Head([Employees].Members, 5) })",
"[Employees].[All Employees]\n"
+ "[Employees].[Sheri Nowmer]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Shauna Wyro]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Leopoldo Renfro]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Donna Brockett]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Laurie Anderson]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Louis Gomez]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Melvin Glass]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Kristin Cohen]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Susan Kharman]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Gordon Kirschner]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Geneva Kouba]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Tricia Clark]");
}
public void testHierarchizeCrossJoinPre() {
assertAxisReturns(
"Hierarchize(\n"
+ " CrossJoin(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " [Gender].MEMBERS),\n"
+ " PRE)",
"{[Product].[All Products], [Gender].[All Gender]}\n"
+ "{[Product].[All Products], [Gender].[F]}\n"
+ "{[Product].[All Products], [Gender].[M]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[All Gender]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[F]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[M]}\n"
+ "{[Product].[Food], [Gender].[All Gender]}\n"
+ "{[Product].[Food], [Gender].[F]}\n"
+ "{[Product].[Food], [Gender].[M]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[All Gender]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[F]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[M]}");
}
    /**
     * Tests Hierarchize() with the POST flag over a CrossJoin: tuples are
     * expected in post-order (each parent after its descendants) on the
     * leading [Product] hierarchy.
     */
    public void testHierarchizeCrossJoinPost() {
        assertAxisReturns(
            "Hierarchize(\n"
            + "  CrossJoin(\n"
            + "    {[Product].[All Products], "
            + "     [Product].[Food],\n"
            + "     [Product].[Food].[Eggs],\n"
            + "     [Product].[Drink].[Dairy]},\n"
            + "    [Gender].MEMBERS),\n"
            + "  POST)",
            "{[Product].[Drink].[Dairy], [Gender].[F]}\n"
            + "{[Product].[Drink].[Dairy], [Gender].[M]}\n"
            + "{[Product].[Drink].[Dairy], [Gender].[All Gender]}\n"
            + "{[Product].[Food].[Eggs], [Gender].[F]}\n"
            + "{[Product].[Food].[Eggs], [Gender].[M]}\n"
            + "{[Product].[Food].[Eggs], [Gender].[All Gender]}\n"
            + "{[Product].[Food], [Gender].[F]}\n"
            + "{[Product].[Food], [Gender].[M]}\n"
            + "{[Product].[Food], [Gender].[All Gender]}\n"
            + "{[Product].[All Products], [Gender].[F]}\n"
            + "{[Product].[All Products], [Gender].[M]}\n"
            + "{[Product].[All Products], [Gender].[All Gender]}");
    }
    /**
     * Tests that the Hierarchize function works correctly when applied to
     * a level whose ordering is determined by an 'ordinal' property.
     * TODO: fix this test (bug 1220787)
     *
     * <p>The test defines a throwaway cube [Sales_Hierarchize] whose
     * time levels are ordered alphabetically by month name (via
     * {@code ordinalColumn="the_month"}), then verifies Hierarchize
     * preserves that ordinal ordering within each parent.
     *
     * WG: Note that this is disabled right now due to its impact on other
     * tests later on within the test suite, specifically XMLA tests that
     * return a list of cubes. We could run this test after XMLA, or clear
     * out the cache to solve this.
     */
    public void testHierarchizeOrdinal() {
        TestContext context = getTestContext().withCube("[Sales_Hierarchize]");
        final Connection connection = context.getConnection();
        // Create the ad-hoc cube directly on the schema; it is removed
        // again by the cache flush at the end of this test.
        connection.getSchema().createCube(
            "<Cube name=\"Sales_Hierarchize\">\n"
            + "  <Table name=\"sales_fact_1997\"/>\n"
            + "  <Dimension name=\"Time_Alphabetical\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
            + "    <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
            + "      <Table name=\"time_by_day\"/>\n"
            + "      <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
            + "          levelType=\"TimeYears\"/>\n"
            + "      <Level name=\"Quarter\" column=\"quarter\" uniqueMembers=\"false\"\n"
            + "          levelType=\"TimeQuarters\"/>\n"
            + "      <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + "          ordinalColumn=\"the_month\"\n"
            + "          levelType=\"TimeMonths\"/>\n"
            + "    </Hierarchy>\n"
            + "  </Dimension>\n"
            + "\n"
            + "  <Dimension name=\"Month_Alphabetical\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
            + "    <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
            + "      <Table name=\"time_by_day\"/>\n"
            + "      <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + "          ordinalColumn=\"the_month\"\n"
            + "          levelType=\"TimeMonths\"/>\n"
            + "    </Hierarchy>\n"
            + "  </Dimension>\n"
            + "\n"
            + "  <Measure name=\"Unit Sales\" column=\"unit_sales\" aggregator=\"sum\"\n"
            + "      formatString=\"Standard\"/>\n"
            + "</Cube>");
        // The [Time_Alphabetical] is ordered alphabetically by month
        context.assertAxisReturns(
            "Hierarchize([Time_Alphabetical].members)",
            "[Time_Alphabetical].[1997]\n"
            + "[Time_Alphabetical].[1997].[Q1]\n"
            + "[Time_Alphabetical].[1997].[Q1].[2]\n"
            + "[Time_Alphabetical].[1997].[Q1].[1]\n"
            + "[Time_Alphabetical].[1997].[Q1].[3]\n"
            + "[Time_Alphabetical].[1997].[Q2]\n"
            + "[Time_Alphabetical].[1997].[Q2].[4]\n"
            + "[Time_Alphabetical].[1997].[Q2].[6]\n"
            + "[Time_Alphabetical].[1997].[Q2].[5]\n"
            + "[Time_Alphabetical].[1997].[Q3]\n"
            + "[Time_Alphabetical].[1997].[Q3].[8]\n"
            + "[Time_Alphabetical].[1997].[Q3].[7]\n"
            + "[Time_Alphabetical].[1997].[Q3].[9]\n"
            + "[Time_Alphabetical].[1997].[Q4]\n"
            + "[Time_Alphabetical].[1997].[Q4].[12]\n"
            + "[Time_Alphabetical].[1997].[Q4].[11]\n"
            + "[Time_Alphabetical].[1997].[Q4].[10]\n"
            + "[Time_Alphabetical].[1998]\n"
            + "[Time_Alphabetical].[1998].[Q1]\n"
            + "[Time_Alphabetical].[1998].[Q1].[2]\n"
            + "[Time_Alphabetical].[1998].[Q1].[1]\n"
            + "[Time_Alphabetical].[1998].[Q1].[3]\n"
            + "[Time_Alphabetical].[1998].[Q2]\n"
            + "[Time_Alphabetical].[1998].[Q2].[4]\n"
            + "[Time_Alphabetical].[1998].[Q2].[6]\n"
            + "[Time_Alphabetical].[1998].[Q2].[5]\n"
            + "[Time_Alphabetical].[1998].[Q3]\n"
            + "[Time_Alphabetical].[1998].[Q3].[8]\n"
            + "[Time_Alphabetical].[1998].[Q3].[7]\n"
            + "[Time_Alphabetical].[1998].[Q3].[9]\n"
            + "[Time_Alphabetical].[1998].[Q4]\n"
            + "[Time_Alphabetical].[1998].[Q4].[12]\n"
            + "[Time_Alphabetical].[1998].[Q4].[11]\n"
            + "[Time_Alphabetical].[1998].[Q4].[10]");
        // The [Month_Alphabetical] is a single-level hierarchy ordered
        // alphabetically by month.
        context.assertAxisReturns(
            "Hierarchize([Month_Alphabetical].members)",
            "[Month_Alphabetical].[4]\n"
            + "[Month_Alphabetical].[8]\n"
            + "[Month_Alphabetical].[12]\n"
            + "[Month_Alphabetical].[2]\n"
            + "[Month_Alphabetical].[1]\n"
            + "[Month_Alphabetical].[7]\n"
            + "[Month_Alphabetical].[6]\n"
            + "[Month_Alphabetical].[3]\n"
            + "[Month_Alphabetical].[5]\n"
            + "[Month_Alphabetical].[11]\n"
            + "[Month_Alphabetical].[10]\n"
            + "[Month_Alphabetical].[9]");
        // clear the cache so that future tests don't fail that expect a
        // specific set of cubes
        TestContext.instance().flushSchemaCache();
    }
    /**
     * Tests Intersect() with the ALL flag: duplicates from the left set
     * are retained (but not duplicates from the right), and the left
     * set's order is preserved.
     */
    public void testIntersectAll() {
        // Note: duplicates retained from left, not from right; and order is
        // preserved.
        assertAxisReturns(
            "Intersect({[Time].[1997].[Q2], [Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q2]}, "
            + "{[Time].[1998], [Time].[1997], [Time].[1997].[Q2], [Time].[1997]}, "
            + "ALL)",
            "[Time].[1997].[Q2]\n"
            + "[Time].[1997]\n"
            + "[Time].[1997].[Q2]");
    }
    /**
     * Tests Intersect() without the ALL flag: duplicates are removed and
     * the surviving members keep the order of their first occurrence in
     * the left set.
     */
    public void testIntersect() {
        // Duplicates not preserved. Output in order that first duplicate
        // occurred.
        assertAxisReturns(
            "Intersect(\n"
            + "  {[Time].[1997].[Q2], [Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q2]}, "
            + "{[Time].[1998], [Time].[1997], [Time].[1997].[Q2], [Time].[1997]})",
            "[Time].[1997].[Q2]\n"
            + "[Time].[1997]");
    }
    /**
     * Tests Intersect() over tuple sets: a tuple must match on every
     * coordinate to survive ({[Time].[1997], [Gender]} does not match
     * {[Time].[1997], [Gender].[F]}).
     */
    public void testIntersectTuples() {
        assertAxisReturns(
            "Intersect(\n"
            + "  {([Time].[1997].[Q2], [Gender].[M]),\n"
            + "   ([Time].[1997], [Gender].[F]),\n"
            + "   ([Time].[1997].[Q1], [Gender].[M]),\n"
            + "   ([Time].[1997].[Q2], [Gender].[M])},\n"
            + "  {([Time].[1998], [Gender].[F]),\n"
            + "   ([Time].[1997], [Gender].[F]),\n"
            + "   ([Time].[1997].[Q2], [Gender].[M]),\n"
            + "   ([Time].[1997], [Gender])})",
            "{[Time].[1997].[Q2], [Gender].[M]}\n"
            + "{[Time].[1997], [Gender].[F]}");
    }
    /** Tests that Intersect() with an empty right argument yields the empty set. */
    public void testIntersectRightEmpty() {
        assertAxisReturns(
            "Intersect({[Time].[1997]}, {})",
            "");
    }
    /** Tests that Intersect() with an empty left argument yields the empty set. */
    public void testIntersectLeftEmpty() {
        assertAxisReturns(
            "Intersect({}, {[Store].[USA].[CA]})",
            "");
    }
    /**
     * Verifies the dimensional dependency analysis of Order(): the
     * expression depends on everything the sort-key expression depends
     * on, minus the hierarchies already fixed by the set being sorted.
     */
    public void testOrderDepends() {
        // Order(<Set>, <Value Expression>) depends upon everything
        // <Value Expression> depends upon, except the dimensions of <Set>.
        // Depends upon everything EXCEPT [Product], [Measures],
        // [Marital Status], [Gender].
        String s11 = TestContext.allHiersExcept(
            "[Product]", "[Measures]", "[Marital Status]", "[Gender]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + " Crossjoin([Gender].MEMBERS, [Product].MEMBERS),"
            + " ([Measures].[Unit Sales], [Marital Status].[S]),"
            + " ASC)",
            s11);
        // Depends upon everything EXCEPT [Product], [Measures],
        // [Marital Status]. Does depend upon [Gender] because the set uses
        // only the *current* gender member, not all of them.
        String s12 = TestContext.allHiersExcept(
            "[Product]", "[Measures]", "[Marital Status]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + " Crossjoin({[Gender].CurrentMember}, [Product].MEMBERS),"
            + " ([Measures].[Unit Sales], [Marital Status].[S]),"
            + " ASC)",
            s12);
        // Depends upon everything except [Measures].
        String s13 = TestContext.allHiersExcept("[Measures]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + "  Crossjoin("
            + "    [Gender].CurrentMember.Children, "
            + "    [Marital Status].CurrentMember.Children), "
            + "  [Measures].[Unit Sales], "
            + "  BDESC)",
            s13);
        String s1 = TestContext.allHiersExcept(
            "[Measures]", "[Store]", "[Product]", "[Time]");
        getTestContext().assertSetExprDependsOn(
            " Order(\n"
            + "    CrossJoin(\n"
            + "      {[Product].[All Products].[Food].[Eggs],\n"
            + "       [Product].[All Products].[Food].[Seafood],\n"
            + "       [Product].[All Products].[Drink].[Alcoholic Beverages]},\n"
            + "      {[Store].[USA].[WA].[Seattle],\n"
            + "       [Store].[USA].[CA],\n"
            + "       [Store].[USA].[OR]}),\n"
            + "    ([Time].[1997].[Q1], [Measures].[Unit Sales]),\n"
            + "    ASC)",
            s1);
    }
    /**
     * Verifies the compiled calc plans produced for Order(): constant
     * sort-key sub-expressions are hoisted into a ContextCalc wrapper so
     * they are evaluated once, while member-dependent sub-expressions
     * stay inside the per-tuple loop. The expected strings are exact
     * plan dumps, so any compiler change shows up as a diff here.
     */
    public void testOrderCalc() {
        if (Util.Retrowoven) {
            // If retrowoven, we don't use Iterable, so plans are different.
            return;
        }
        // [Measures].[Unit Sales] is a constant member, so it is evaluated in
        // a ContextCalc.
        assertAxisCompilesTo(
            "order([Product].children, [Measures].[Unit Sales])",
            "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
            + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "        Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "        ValueCalc(name=ValueCalc, class=class mondrian.calc.impl.ValueCalc, type=SCALAR, resultStyle=VALUE)\n");
        // [Time].[1997] is constant, and is evaluated in a ContextCalc.
        // [Product].Parent is variable, and is evaluated inside the loop.
        assertAxisCompilesTo(
            "order([Product].children,"
            + " ([Time].[1997], [Product].CurrentMember.Parent))",
            "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Time].[1997]>, resultStyle=VALUE_NOT_NULL, value=[Time].[1997])\n"
            + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "        Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
            + "            Parent(name=Parent, class=class mondrian.olap.fun.BuiltinFunTable$15$1, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "                CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n");
        // No ContextCalc this time. All members are non-variable.
        assertAxisCompilesTo(
            "order([Product].children, [Product].CurrentMember.Parent)",
            "CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "    Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "        CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "    MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
            + "        Parent(name=Parent, class=class mondrian.olap.fun.BuiltinFunTable$15$1, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n");
        // List expression is dependent on one of the constant calcs. It cannot
        // be pulled up, so [Gender].[M] is not in the ContextCalc.
        // Note that there is no CopyListCalc - because Filter creates its own
        // mutable copy.
        // Under JDK 1.4, needs an extra converter from list to iterator,
        // because JDK 1.4 doesn't support the ITERABLE result style.
        assertAxisCompilesTo(
            "order(filter([Product].children, [Measures].[Unit Sales] > 1000), "
            + "([Gender].[M], [Measures].[Store Sales]))",
            Util.Retrowoven
                ? ""
                  + "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Store Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Store Sales])\n"
                  + "    MemberCalcImpl(name=MemberCalcImpl, class=class mondrian.olap.fun.OrderFunDef$MemberCalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
                  + "        MemberListIterCalc(name=MemberListIterCalc, class=class mondrian.calc.impl.AbstractExpCompiler$MemberListIterCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=ITERABLE)\n"
                  + "            ImmutableMemberListCalc(name=ImmutableMemberListCalc, class=class mondrian.olap.fun.FilterFunDef$ImmutableMemberListCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "                Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
                  + "                    CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
                  + "                >(name=>, class=class mondrian.olap.fun.BuiltinFunTable$63$1, type=BOOLEAN, resultStyle=VALUE)\n"
                  + "                    MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "                        Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
                  + "                    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=NUMERIC, resultStyle=VALUE_NOT_NULL, value=1000.0)\n"
                  + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "            Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n"
                : ""
                  + "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Store Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Store Sales])\n"
                  + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
                  + "        ImmutableIterCalc(name=ImmutableIterCalc, class=class mondrian.olap.fun.FilterFunDef$ImmutableIterCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=ITERABLE)\n"
                  + "            Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
                  + "                CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
                  + "            >(name=>, class=class mondrian.olap.fun.BuiltinFunTable$63$1, type=BOOLEAN, resultStyle=VALUE)\n"
                  + "                MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "                    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
                  + "                Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=NUMERIC, resultStyle=VALUE_NOT_NULL, value=1000.0)\n"
                  + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "            Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n");
    }
    /**
     * Verifies that the Order function works with a calculated member as
     * the sort key ([Measures].[Product Name Length], sorted BASC).
     * See this forum post for additional information:
     * http://forums.pentaho.com/showthread.php?p=179473#post179473
     */
    public void testOrderWithMember() {
        assertQueryReturns(
            "with member [Measures].[Product Name Length] as "
            + "'LEN([Product].CurrentMember.Name)'\n"
            + "select {[Measures].[Product Name Length]} ON COLUMNS,\n"
            + "Order([Product].[All Products].Children, "
            + "[Measures].[Product Name Length], BASC) ON ROWS\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Product Name Length]}\n"
            + "Axis #2:\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Non-Consumable]}\n"
            + "Row #0: 4\n"
            + "Row #1: 5\n"
            + "Row #2: 14\n");
    }
    /**
     * Test case for bug #1797159, "Potential MDX Order Non Empty
     * Problem": verifies that NON EMPTY combined with Order() on an axis
     * prunes empty rows without disturbing the requested sort order.
     */
    public void testOrderNonEmpty() {
        assertQueryReturns(
            "select NON EMPTY [Gender].Members ON COLUMNS,\n"
            + "NON EMPTY Order([Product].[All Products].[Drink].Children,\n"
            + "[Gender].[All Gender].[F], ASC) ON ROWS\n"
            + "from [Sales]\n"
            + "where ([Customers].[All Customers].[USA].[CA].[San Francisco],\n"
            + " [Time].[1997])",
            "Axis #0:\n"
            + "{[Customers].[USA].[CA].[San Francisco], [Time].[1997]}\n"
            + "Axis #1:\n"
            + "{[Gender].[All Gender]}\n"
            + "{[Gender].[F]}\n"
            + "{[Gender].[M]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "Row #0: 2\n"
            + "Row #0: \n"
            + "Row #0: 2\n"
            + "Row #1: 4\n"
            + "Row #1: 2\n"
            + "Row #1: 2\n");
    }
    /**
     * Basic Order() test: hierarchical ascending sort of a mixed-level
     * product set by [Unit Sales] — children are sorted within their
     * parents, so the hierarchy is preserved in the output.
     */
    public void testOrder() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,\n"
            + " order({\n"
            + "  [Product].[All Products].[Drink],\n"
            + "  [Product].[All Products].[Drink].[Beverages],\n"
            + "  [Product].[All Products].[Drink].[Dairy],\n"
            + "  [Product].[All Products].[Food],\n"
            + "  [Product].[All Products].[Food].[Baked Goods],\n"
            + "  [Product].[All Products].[Food].[Eggs],\n"
            + "  [Product].[All Products]},\n"
            + " [Measures].[Unit Sales]) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[All Products]}\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Food].[Eggs]}\n"
            + "{[Product].[Food].[Baked Goods]}\n"
            + "Row #0: 266,773\n"
            + "Row #1: 24,597\n"
            + "Row #2: 4,186\n"
            + "Row #3: 13,573\n"
            + "Row #4: 191,940\n"
            + "Row #5: 4,132\n"
            + "Row #6: 7,870\n");
    }
    /**
     * Tests hierarchical Order() when the members' parents are not in the
     * set: members are compared via their (invisible) ancestors' values.
     */
    public void testOrderParentsMissing() {
        // Paradoxically, [Alcoholic Beverages] comes before
        // [Eggs] even though it has a larger value, because
        // its parent [Drink] has a smaller value than [Food].
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,"
            + " order({\n"
            + "  [Product].[All Products].[Drink].[Alcoholic Beverages],\n"
            + "  [Product].[All Products].[Food].[Eggs]},\n"
            + " [Measures].[Unit Sales], ASC) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Product].[Food].[Eggs]}\n"
            + "Row #0: 6,838\n"
            + "Row #1: 4,132\n");
    }
    /**
     * Tests Order() with the BDESC ("break hierarchy, descending") flag
     * over a CrossJoin: tuples are sorted purely by measure value,
     * ignoring hierarchical relationships.
     */
    public void testOrderCrossJoinBreak() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,\n"
            + "  Order(\n"
            + "    CrossJoin(\n"
            + "      [Gender].children,\n"
            + "      [Marital Status].children),\n"
            + "    [Measures].[Unit Sales],\n"
            + "    BDESC) on rows\n"
            + "from Sales\n"
            + "where [Time].[1997].[Q1]",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[M]}\n"
            + "{[Gender].[M], [Marital Status].[M]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "Row #0: 17,070\n"
            + "Row #1: 16,790\n"
            + "Row #2: 16,311\n"
            + "Row #3: 16,120\n");
    }
    /**
     * Tests hierarchical ascending Order() over a CrossJoin: each tuple
     * coordinate is compared via its ancestors, so members with absent
     * parents in the set still sort by their parents' values.
     */
    public void testOrderCrossJoin() {
        // Note:
        // 1. [Alcoholic Beverages] collates before [Eggs] and
        //    [Seafood] because its parent, [Drink], is less
        //    than [Food]
        // 2. [Seattle] generally sorts after [CA] and [OR]
        //    because invisible parent [WA] is greater.
        assertQueryReturns(
            "select CrossJoin(\n"
            + "    {[Time].[1997],\n"
            + "     [Time].[1997].[Q1]},\n"
            + "    {[Measures].[Unit Sales]}) on columns,\n"
            + "  Order(\n"
            + "    CrossJoin(\n"
            + "      {[Product].[All Products].[Food].[Eggs],\n"
            + "       [Product].[All Products].[Food].[Seafood],\n"
            + "       [Product].[All Products].[Drink].[Alcoholic Beverages]},\n"
            + "      {[Store].[USA].[WA].[Seattle],\n"
            + "       [Store].[USA].[CA],\n"
            + "       [Store].[USA].[OR]}),\n"
            + "    ([Time].[1997].[Q1], [Measures].[Unit Sales]),\n"
            + "    ASC) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997], [Measures].[Unit Sales]}\n"
            + "{[Time].[1997].[Q1], [Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[OR]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[CA]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[WA].[Seattle]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[CA]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[OR]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[WA].[Seattle]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[CA]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[OR]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[WA].[Seattle]}\n"
            + "Row #0: 1,680\n"
            + "Row #0: 393\n"
            + "Row #1: 1,936\n"
            + "Row #1: 431\n"
            + "Row #2: 635\n"
            + "Row #2: 142\n"
            + "Row #3: 441\n"
            + "Row #3: 91\n"
            + "Row #4: 451\n"
            + "Row #4: 107\n"
            + "Row #5: 217\n"
            + "Row #5: 44\n"
            + "Row #6: 1,116\n"
            + "Row #6: 240\n"
            + "Row #7: 1,119\n"
            + "Row #7: 251\n"
            + "Row #8: 373\n"
            + "Row #8: 57\n");
    }
    /**
     * Tests hierarchical descending Order(): parents still precede their
     * children, but siblings are arranged by descending [Unit Sales].
     */
    public void testOrderHierarchicalDesc() {
        assertAxisReturns(
            "Order(\n"
            + "    {[Product].[All Products], "
            + "     [Product].[Food],\n"
            + "     [Product].[Drink],\n"
            + "     [Product].[Non-Consumable],\n"
            + "     [Product].[Food].[Eggs],\n"
            + "     [Product].[Drink].[Dairy]},\n"
            + "  [Measures].[Unit Sales],\n"
            + "  DESC)",
            "[Product].[All Products]\n"
            + "[Product].[Food]\n"
            + "[Product].[Food].[Eggs]\n"
            + "[Product].[Non-Consumable]\n"
            + "[Product].[Drink]\n"
            + "[Product].[Drink].[Dairy]");
    }
    /**
     * Tests hierarchical descending Order() over a CrossJoin: the leading
     * [Gender] coordinate sorts first ([M] before [F] by value), then the
     * product members sort hierarchically/descending within each gender.
     */
    public void testOrderCrossJoinDesc() {
        assertAxisReturns(
            "Order(\n"
            + "  CrossJoin(\n"
            + "    {[Gender].[M], [Gender].[F]},\n"
            + "    {[Product].[All Products], "
            + "     [Product].[Food],\n"
            + "     [Product].[Drink],\n"
            + "     [Product].[Non-Consumable],\n"
            + "     [Product].[Food].[Eggs],\n"
            + "     [Product].[Drink].[Dairy]}),\n"
            + "  [Measures].[Unit Sales],\n"
            + "  DESC)",
            "{[Gender].[M], [Product].[All Products]}\n"
            + "{[Gender].[M], [Product].[Food]}\n"
            + "{[Gender].[M], [Product].[Food].[Eggs]}\n"
            + "{[Gender].[M], [Product].[Non-Consumable]}\n"
            + "{[Gender].[M], [Product].[Drink]}\n"
            + "{[Gender].[M], [Product].[Drink].[Dairy]}\n"
            + "{[Gender].[F], [Product].[All Products]}\n"
            + "{[Gender].[F], [Product].[Food]}\n"
            + "{[Gender].[F], [Product].[Food].[Eggs]}\n"
            + "{[Gender].[F], [Product].[Non-Consumable]}\n"
            + "{[Gender].[F], [Product].[Drink]}\n"
            + "{[Gender].[F], [Product].[Drink].[Dairy]}");
    }
    /**
     * Regression test for bug #656802: Order() applied to the result of
     * ToggleDrillState() must sort the drilled-down tuples correctly.
     */
    public void testOrderBug656802() {
        // Note:
        // 1. [Alcoholic Beverages] collates before [Eggs] and
        //    [Seafood] because its parent, [Drink], is less
        //    than [Food]
        // 2. [Seattle] generally sorts after [CA] and [OR]
        //    because invisible parent [WA] is greater.
        assertQueryReturns(
            "select {[Measures].[Unit Sales], [Measures].[Store Cost], [Measures].[Store Sales]} ON columns, \n"
            + "Order(\n"
            + "  ToggleDrillState(\n"
            + "    {([Promotion Media].[All Media], [Product].[All Products])},\n"
            + "    {[Product].[All Products]}), \n"
            + "  [Measures].[Unit Sales], DESC) ON rows \n"
            + "from [Sales] where ([Time].[1997])",
            "Axis #0:\n"
            + "{[Time].[1997]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Cost]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Promotion Media].[All Media], [Product].[All Products]}\n"
            + "{[Promotion Media].[All Media], [Product].[Food]}\n"
            + "{[Promotion Media].[All Media], [Product].[Non-Consumable]}\n"
            + "{[Promotion Media].[All Media], [Product].[Drink]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 225,627.23\n"
            + "Row #0: 565,238.13\n"
            + "Row #1: 191,940\n"
            + "Row #1: 163,270.72\n"
            + "Row #1: 409,035.59\n"
            + "Row #2: 50,236\n"
            + "Row #2: 42,879.28\n"
            + "Row #2: 107,366.33\n"
            + "Row #3: 24,597\n"
            + "Row #3: 19,477.23\n"
            + "Row #3: 48,836.21\n");
    }
    /**
     * Simplified regression test for bug #712702: Order() must place
     * members with empty cell values before members with data when
     * sorting ascending ([1998] has no sales data and sorts first).
     */
    public void testOrderBug712702_Simplified() {
        assertQueryReturns(
            "SELECT Order({[Time].[Year].members}, [Measures].[Unit Sales]) on columns\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1998]}\n"
            + "{[Time].[1997]}\n"
            + "Row #0: \n"
            + "Row #0: 266,773\n");
    }
    /**
     * Original regression test for bug #712702: Order() over a large
     * CrossJoin sorted by a calculated average measure, with NON EMPTY
     * pruning. Verifies both the resulting row order and cell values.
     */
    public void testOrderBug712702_Original() {
        assertQueryReturns(
            "with member [Measures].[Average Unit Sales] as 'Avg(Descendants([Time].[Time].CurrentMember, [Time].[Month]), \n"
            + "[Measures].[Unit Sales])' \n"
            + "member [Measures].[Max Unit Sales] as 'Max(Descendants([Time].[Time].CurrentMember, [Time].[Month]), [Measures].[Unit Sales])' \n"
            + "select {[Measures].[Average Unit Sales], [Measures].[Max Unit Sales], [Measures].[Unit Sales]} ON columns, \n"
            + "  NON EMPTY Order(\n"
            + "    Crossjoin(\n"
            + "      {[Store].[USA].[OR].[Portland],\n"
            + "       [Store].[USA].[OR].[Salem],\n"
            + "       [Store].[USA].[OR].[Salem].[Store 13],\n"
            + "       [Store].[USA].[CA].[San Francisco],\n"
            + "       [Store].[USA].[CA].[San Diego],\n"
            + "       [Store].[USA].[CA].[Beverly Hills],\n"
            + "       [Store].[USA].[CA].[Los Angeles],\n"
            + "       [Store].[USA].[WA].[Walla Walla],\n"
            + "       [Store].[USA].[WA].[Bellingham],\n"
            + "       [Store].[USA].[WA].[Yakima],\n"
            + "       [Store].[USA].[WA].[Spokane],\n"
            + "       [Store].[USA].[WA].[Seattle], \n"
            + "       [Store].[USA].[WA].[Bremerton],\n"
            + "       [Store].[USA].[WA].[Tacoma]},\n"
            + "     [Time].[Year].Members), \n"
            + "  [Measures].[Average Unit Sales], ASC) ON rows\n"
            + "from [Sales] ",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Average Unit Sales]}\n"
            + "{[Measures].[Max Unit Sales]}\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[OR].[Portland], [Time].[1997]}\n"
            + "{[Store].[USA].[OR].[Salem], [Time].[1997]}\n"
            + "{[Store].[USA].[OR].[Salem].[Store 13], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Walla Walla], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Bellingham], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Yakima], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Spokane], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Bremerton], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Seattle], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Tacoma], [Time].[1997]}\n"
            + "Row #0: 2,173\n"
            + "Row #0: 2,933\n"
            + "Row #0: 26,079\n"
            + "Row #1: 3,465\n"
            + "Row #1: 5,891\n"
            + "Row #1: 41,580\n"
            + "Row #2: 3,465\n"
            + "Row #2: 5,891\n"
            + "Row #2: 41,580\n"
            + "Row #3: 176\n"
            + "Row #3: 222\n"
            + "Row #3: 2,117\n"
            + "Row #4: 1,778\n"
            + "Row #4: 2,545\n"
            + "Row #4: 21,333\n"
            + "Row #5: 2,136\n"
            + "Row #5: 2,686\n"
            + "Row #5: 25,635\n"
            + "Row #6: 2,139\n"
            + "Row #6: 2,669\n"
            + "Row #6: 25,663\n"
            + "Row #7: 184\n"
            + "Row #7: 301\n"
            + "Row #7: 2,203\n"
            + "Row #8: 186\n"
            + "Row #8: 275\n"
            + "Row #8: 2,237\n"
            + "Row #9: 958\n"
            + "Row #9: 1,163\n"
            + "Row #9: 11,491\n"
            + "Row #10: 1,966\n"
            + "Row #10: 2,634\n"
            + "Row #10: 23,591\n"
            + "Row #11: 2,048\n"
            + "Row #11: 2,623\n"
            + "Row #11: 24,576\n"
            + "Row #12: 2,084\n"
            + "Row #12: 2,304\n"
            + "Row #12: 25,011\n"
            + "Row #13: 2,938\n"
            + "Row #13: 3,818\n"
            + "Row #13: 35,257\n");
    }
    /** Tests that Order() of an empty set returns an empty axis without error. */
    public void testOrderEmpty() {
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {},"
            + "    [Customers].currentMember, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n");
    }
    /** Tests that Order() of a single-member set returns that member unchanged. */
    public void testOrderOne() {
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young]},"
            + "    [Customers].currentMember, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 75\n");
    }
    /** Tests Order() by the OrderKey property on an empty set: yields an empty axis. */
    public void testOrderKeyEmpty() {
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {},"
            + "    [Customers].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n");
    }
    /** Tests Order() by the OrderKey property on a single-member set. */
    public void testOrderKeyOne() {
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young]},"
            + "    [Customers].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 75\n");
    }
    /**
     * Tests hierarchical descending Order() by member name: [Drink]
     * stays first (parent before children), its children sort by name
     * descending.
     */
    public void testOrderDesc() {
        // based on olap4j's OlapTest.testSortDimension
        assertQueryReturns(
            "SELECT\n"
            + "{[Measures].[Store Sales]} ON COLUMNS,\n"
            + "{Order(\n"
            + "  {{[Product].[Drink], [Product].[Drink].Children}},\n"
            + "  [Product].CurrentMember.Name,\n"
            + "  DESC)} ON ROWS\n"
            + "FROM [Sales]\n"
            + "WHERE {[Time].[1997].[Q3].[7]}",
            "Axis #0:\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "Row #0: 4,409.58\n"
            + "Row #1: 629.69\n"
            + "Row #2: 2,477.02\n"
            + "Row #3: 1,302.87\n");
    }
public void testOrderMemberMemberValueExpNew() {
propSaver.set(
MondrianProperties.instance().CompareSiblingsByOrderKey,
true);
// Use a fresh connection to make sure bad member ordinals haven't
// been assigned by previous tests.
final TestContext context = getTestContext().withFreshConnection();
try {
context.assertQueryReturns(
"select \n"
+ " Order("
+ " {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
+ " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
+ " [Customers].currentMember.OrderKey, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
+ "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
+ "Row #0: 33\n"
+ "Row #0: 75\n");
} finally {
if (context != null) {
context.close();
}
}
}
    /**
     * Companion to {@code testOrderMemberMemberValueExpNew}: sorts the
     * same two customers by the default measure (the bare current member)
     * rather than by OrderKey, with CompareSiblingsByOrderKey enabled.
     */
    public void testOrderMemberMemberValueExpNew1() {
        // sort by default measure
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "select \n"
                + "  Order("
                + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + "    [Customers].currentMember, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    /**
     * Tests that Order() without an explicit direction flag defaults to
     * ASC, sorting by the OrderKey property.
     *
     * <p>NOTE(review): the calculated member [Measures].[Zero] appears
     * unused by the query — presumably a leftover from a related test;
     * kept as-is since it does not affect the result.
     */
    public void testOrderMemberDefaultFlag1() {
        // flags not specified default to ASC - sort by default measure
        assertQueryReturns(
            "with \n"
            + "  Member [Measures].[Zero] as '0' \n"
            + "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Customers].currentMember.OrderKey) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    /**
     * Tests that Order() without an explicit direction flag defaults to
     * ASC when sorting by an explicit measure ([Store Cost]).
     */
    public void testOrderMemberDefaultFlag2() {
        // flags not specified default to ASC
        assertQueryReturns(
            "with \n"
            + "  Member [Measures].[Zero] as '0' \n"
            + "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Measures].[Store Cost]) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    // Hierarchical DESC (no 'B' prefix) on OrderKey: members keep their
    // hierarchy grouping and fall back to key order when no order key exists.
    public void testOrderMemberMemberValueExpHierarchy() {
        // Santa Monica and Woodland Hills both don't have orderkey
        // members are sorted by the order of their keys
        assertQueryReturns(
            "select \n"
            + " Order("
            + " {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + " [Customers].currentMember.OrderKey, DESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    // Multi-key Order(): primary key Unit Sales BDESC, tie broken by
    // descending customer OrderKey (Adeline vs Abe both have 33 sales).
    public void testOrderMemberMultiKeysMemberValueExp1() {
        // sort by unit sales and then customer id (Adeline = 6442, Abe = 570)
        assertQueryReturns(
            "select \n"
            + " Order("
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + " [Measures].[Unit Sales], BDESC, [Customers].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    // Multi-key Order(): ascend by grandparent (state) order key, then
    // descend by the customer's own order key within each state.
    public void testOrderMemberMultiKeysMemberValueExp2() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "select \n"
                + " Order("
                + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + " [Customers].currentMember.Parent.Parent.OrderKey, BASC, [Customers].currentMember.OrderKey, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    // Secondary key on an unrelated hierarchy ([Time]) must not perturb the
    // order established by the first key; ties keep their input order.
    public void testOrderMemberMultiKeysMemberValueExpDepends() {
        // should preserve order of Abe and Adeline (note second key is [Time])
        assertQueryReturns(
            "select \n"
            + " Order("
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + " [Measures].[Unit Sales], BDESC, [Time].[Time].currentMember, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    // Order() over a tuple set (customer x store) with a single key: the
    // customer order key, break-hierarchy descending.
    public void testOrderTupleSingleKeysNew() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "with \n"
                + " set [NECJ] as \n"
                + " 'NonEmptyCrossJoin( \n"
                + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + " {[Store].[USA].[WA].[Seattle],\n"
                + " [Store].[USA].[CA],\n"
                + " [Store].[USA].[OR]})'\n"
                + "select \n"
                + " Order([NECJ], [Customers].currentMember.OrderKey, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel], [Store].[USA].[WA].[Seattle]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    // Same tuple set, but hierarchical DESC keyed on the *store* member of
    // each tuple: WA sorts before CA.
    public void testOrderTupleSingleKeysNew1() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "with \n"
                + " set [NECJ] as \n"
                + " 'NonEmptyCrossJoin( \n"
                + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + " {[Store].[USA].[WA].[Seattle],\n"
                + " [Store].[USA].[CA],\n"
                + " [Store].[USA].[OR]})'\n"
                + "select \n"
                + " Order([NECJ], [Store].currentMember.OrderKey, DESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel], [Store].[USA].[WA].[Seattle]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun], [Store].[USA].[CA]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    // Tuple set ordered by two keys: store order key BDESC, then
    // Unit Sales BDESC within each store.
    public void testOrderTupleMultiKeys1() {
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + " 'NonEmptyCrossJoin( \n"
            + " {[Store].[USA].[CA],\n"
            + " [Store].[USA].[WA]},\n"
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Store].currentMember.OrderKey, BDESC, [Measures].[Unit Sales], BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    // Second key uses Ancestor() to reach the customer-name level's order
    // key; ties on Unit Sales are broken by that ancestor key, descending.
    public void testOrderTupleMultiKeys2() {
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + " 'NonEmptyCrossJoin( \n"
            + " {[Store].[USA].[CA],\n"
            + " [Store].[USA].[WA]},\n"
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Measures].[Unit Sales], BDESC, Ancestor([Customers].currentMember, [Customers].[Name]).OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    // First key hierarchical DESC (keeps WA before CA because WA's state
    // total is larger), second key sorts the ancestor member itself BDESC.
    public void testOrderTupleMultiKeys3() {
        // WA unit sales is greater than CA unit sales
        // Santa Monica unit sales (2660) is greater that Woodland hills (2516)
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + " 'NonEmptyCrossJoin( \n"
            + " {[Store].[USA].[CA],\n"
            + " [Store].[USA].[WA]},\n"
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Measures].[Unit Sales], DESC, Ancestor([Customers].currentMember, [Customers].[Name]), BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    // Order() by order keys across a *virtual* cube joining Sales and HR
    // dimensions; the cube itself is meaningless, only ordering is tested.
    public void testOrderTupleMultiKeyswithVCube() {
        // WA unit sales is greater than CA unit sales
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        // a non-sense cube just to test ordering by order key
        TestContext context = TestContext.instance().create(
            null,
            null,
            "<VirtualCube name=\"Sales vs HR\">\n"
            + "<VirtualCubeDimension cubeName=\"Sales\" name=\"Customers\"/>\n"
            + "<VirtualCubeDimension cubeName=\"HR\" name=\"Position\"/>\n"
            + "<VirtualCubeMeasure cubeName=\"HR\" name=\"[Measures].[Org Salary]\"/>\n"
            + "</VirtualCube>",
            null, null, null);
        context.assertQueryReturns(
            "with \n"
            + " set [CJ] as \n"
            + " 'CrossJoin( \n"
            + " {[Position].[Store Management].children},\n"
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " [Measures].[Org Salary] on columns, \n"
            + " Order([CJ], [Position].currentMember.OrderKey, BASC, Ancestor([Customers].currentMember, [Customers].[Name]).OrderKey, BDESC) \n"
            + "on rows \n"
            + "from [Sales vs HR]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Org Salary]}\n"
            + "Axis #2:\n"
            // All cell values are empty: the two dimensions share no fact
            // table, so only the ordering of positions matters here.
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: \n"
            + "Row #1: \n"
            + "Row #2: \n"
            + "Row #3: \n"
            + "Row #4: \n"
            + "Row #5: \n"
            + "Row #6: \n"
            + "Row #7: \n"
            + "Row #8: \n");
    }
    // First sort key is a constant ([Customers].[USA].OrderKey, same for
    // every row) and so must be a no-op; only the second key reorders.
    public void testOrderConstant1() {
        // sort by customerId (Abel = 7851, Adeline = 6442, Abe = 570)
        assertQueryReturns(
            "select \n"
            + " Order("
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + " [Customers].[USA].OrderKey, BDESC, [Customers].currentMember.OrderKey, BASC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    // NOTE(review): method name typo "Diffrent" predates this review; kept
    // because JUnit 3 discovers tests by method name.
    // Sort keys reference hierarchies ([Product], [Gender]) absent from the
    // set being ordered; both keys are constant, so input order is preserved.
    public void testOrderDiffrentDim() {
        assertQueryReturns(
            "select \n"
            + " Order("
            + " {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + " [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + " [Product].currentMember.OrderKey, BDESC, [Gender].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests the Unorder() function: it is a hint that strips ordering
     * guarantees, and in the current implementation simply returns its
     * argument unchanged (so Unorder(Order(...)) keeps the sorted order).
     * Also checks implicit member-to-set conversion and signature errors.
     */
    public void testUnorder() {
        assertAxisReturns(
            "Unorder([Gender].members)",
            "[Gender].[All Gender]\n"
            + "[Gender].[F]\n"
            + "[Gender].[M]");
        assertAxisReturns(
            "Unorder(Order([Gender].members, -[Measures].[Unit Sales]))",
            "[Gender].[All Gender]\n"
            + "[Gender].[M]\n"
            + "[Gender].[F]");
        assertAxisReturns(
            "Unorder(Crossjoin([Gender].members, [Marital Status].Children))",
            "{[Gender].[All Gender], [Marital Status].[M]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[M]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}\n"
            + "{[Gender].[M], [Marital Status].[S]}");
        // implicitly convert member to set
        assertAxisReturns(
            "Unorder([Gender].[M])",
            "[Gender].[M]");
        // Non-set arguments must be rejected with a signature error.
        assertAxisThrows(
            "Unorder(1 + 3)",
            "No function matches signature 'Unorder(<Numeric Expression>)'");
        assertAxisThrows(
            "Unorder([Gender].[M], 1 + 3)",
            "No function matches signature 'Unorder(<Member>, <Numeric Expression>)'");
        assertQueryReturns(
            "select {[Measures].[Store Sales], [Measures].[Unit Sales]} on 0,\n"
            + " Unorder([Gender].Members) on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[All Gender]}\n"
            + "{[Gender].[F]}\n"
            + "{[Gender].[M]}\n"
            + "Row #0: 565,238.13\n"
            + "Row #0: 266,773\n"
            + "Row #1: 280,226.21\n"
            + "Row #1: 131,558\n"
            + "Row #2: 285,011.92\n"
            + "Row #2: 135,215\n");
    }
public void testSiblingsA() {
assertAxisReturns(
"{[Time].[1997].Siblings}",
"[Time].[1997]\n"
+ "[Time].[1998]");
}
public void testSiblingsB() {
assertAxisReturns(
"{[Store].Siblings}",
"[Store].[All Stores]");
}
public void testSiblingsC() {
assertAxisReturns(
"{[Store].[USA].[CA].Siblings}",
"[Store].[USA].[CA]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA].[WA]");
}
public void testSiblingsD() {
// The null member has no siblings -- not even itself
assertAxisReturns("{[Gender].Parent.Siblings}", "");
assertExprReturns(
"count ([Gender].parent.siblings, includeempty)", "0");
}
public void testSubset() {
assertAxisReturns(
"Subset([Promotion Media].Children, 7, 2)",
"[Promotion Media].[Product Attachment]\n"
+ "[Promotion Media].[Radio]");
}
public void testSubsetNegativeCount() {
assertAxisReturns(
"Subset([Promotion Media].Children, 3, -1)",
"");
}
public void testSubsetNegativeStart() {
assertAxisReturns(
"Subset([Promotion Media].Children, -2, 4)",
"");
}
public void testSubsetDefault() {
assertAxisReturns(
"Subset([Promotion Media].Children, 11)",
"[Promotion Media].[Sunday Paper, Radio]\n"
+ "[Promotion Media].[Sunday Paper, Radio, TV]\n"
+ "[Promotion Media].[TV]");
}
public void testSubsetOvershoot() {
assertAxisReturns(
"Subset([Promotion Media].Children, 15)",
"");
}
public void testSubsetEmpty() {
assertAxisReturns(
"Subset([Gender].[F].Children, 1)",
"");
assertAxisReturns(
"Subset([Gender].[F].Children, 1, 3)",
"");
}
public void testTail() {
assertAxisReturns(
"Tail([Store].Children, 2)",
"[Store].[Mexico]\n"
+ "[Store].[USA]");
}
public void testTailNegative() {
assertAxisReturns(
"Tail([Store].Children, 2 - 3)",
"");
}
public void testTailDefault() {
assertAxisReturns(
"Tail([Store].Children)",
"[Store].[USA]");
}
public void testTailOvershoot() {
assertAxisReturns(
"Tail([Store].Children, 2 + 2)",
"[Store].[Canada]\n"
+ "[Store].[Mexico]\n"
+ "[Store].[USA]");
}
public void testTailEmpty() {
assertAxisReturns(
"Tail([Gender].[F].Children, 2)",
"");
assertAxisReturns(
"Tail([Gender].[F].Children)",
"");
}
    // ToggleDrillState(set, toggles): USA appears in the toggle set and is
    // collapsed, so it gets expanded (children inserted after it); Canada
    // is untouched.
    public void testToggleDrillState() {
        assertAxisReturns(
            "ToggleDrillState({[Customers].[USA],[Customers].[Canada]},"
            + "{[Customers].[USA],[Customers].[USA].[CA]})",
            "[Customers].[USA]\n"
            + "[Customers].[USA].[CA]\n"
            + "[Customers].[USA].[OR]\n"
            + "[Customers].[USA].[WA]\n"
            + "[Customers].[Canada]");
    }
    // Toggling one department member within the whole level expands only
    // that member ([Snack Foods] gains its single child).
    public void testToggleDrillState2() {
        assertAxisReturns(
            "ToggleDrillState([Product].[Product Department].members, "
            + "{[Product].[All Products].[Food].[Snack Foods]})",
            "[Product].[Drink].[Alcoholic Beverages]\n"
            + "[Product].[Drink].[Beverages]\n"
            + "[Product].[Drink].[Dairy]\n"
            + "[Product].[Food].[Baked Goods]\n"
            + "[Product].[Food].[Baking Goods]\n"
            + "[Product].[Food].[Breakfast Foods]\n"
            + "[Product].[Food].[Canned Foods]\n"
            + "[Product].[Food].[Canned Products]\n"
            + "[Product].[Food].[Dairy]\n"
            + "[Product].[Food].[Deli]\n"
            + "[Product].[Food].[Eggs]\n"
            + "[Product].[Food].[Frozen Foods]\n"
            + "[Product].[Food].[Meat]\n"
            + "[Product].[Food].[Produce]\n"
            + "[Product].[Food].[Seafood]\n"
            + "[Product].[Food].[Snack Foods]\n"
            + "[Product].[Food].[Snack Foods].[Snack Foods]\n"
            + "[Product].[Food].[Snacks]\n"
            + "[Product].[Food].[Starchy Foods]\n"
            + "[Product].[Non-Consumable].[Carousel]\n"
            + "[Product].[Non-Consumable].[Checkout]\n"
            + "[Product].[Non-Consumable].[Health and Hygiene]\n"
            + "[Product].[Non-Consumable].[Periodicals]\n"
            + "[Product].[Non-Consumable].[Household]\n");
    }
    // Toggling an already-drilled member ([Q2], whose children [4] and [6]
    // are present in the set) collapses it: the children are removed.
    public void testToggleDrillState3() {
        assertAxisReturns(
            "ToggleDrillState("
            + "{[Time].[1997].[Q1],"
            + " [Time].[1997].[Q2],"
            + " [Time].[1997].[Q2].[4],"
            + " [Time].[1997].[Q2].[6],"
            + " [Time].[1997].[Q3]},"
            + "{[Time].[1997].[Q2]})",
            "[Time].[1997].[Q1]\n"
            + "[Time].[1997].[Q2]\n"
            + "[Time].[1997].[Q3]");
    }
    // bug 634860
    // ToggleDrillState on a tuple set: toggling [Store].[USA].[CA] expands
    // the first tuple (the CA cities are inserted) and leaves the second
    // tuple's USA member alone.
    public void testToggleDrillStateTuple() {
        assertAxisReturns(
            "ToggleDrillState(\n"
            + "{([Store].[USA].[CA],"
            + " [Product].[All Products].[Drink].[Alcoholic Beverages]),\n"
            + " ([Store].[USA],"
            + " [Product].[All Products].[Drink])},\n"
            + "{[Store].[All stores].[USA].[CA]})",
            "{[Store].[USA].[CA], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Alameda], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA], [Product].[Drink]}");
    }
    // The optional RECURSIVE keyword is accepted by the parser but not
    // implemented; it must raise a clear error.
    public void testToggleDrillStateRecursive() {
        // We expect this to fail.
        assertQueryThrows(
            "Select \n"
            + "  ToggleDrillState(\n"
            + "    {[Store].[USA]}, \n"
            + "    {[Store].[USA]}, recursive) on Axis(0) \n"
            + "from [Sales]\n",
            "'RECURSIVE' is not supported in ToggleDrillState.");
    }
    // TopCount(set, n, measure): the n members with the largest values,
    // in descending order of the measure.
    public void testTopCount() {
        assertAxisReturns(
            "TopCount({[Promotion Media].[Media Type].members}, 2, [Measures].[Unit Sales])",
            "[Promotion Media].[No Media]\n"
            + "[Promotion Media].[Daily Paper, Radio, TV]");
    }
    // The third argument may be a tuple expression, not just a measure.
    public void testTopCountTuple() {
        assertAxisReturns(
            "TopCount([Customers].[Name].members,2,(Time.[1997].[Q1],[Measures].[Store Sales]))",
            "[Customers].[USA].[WA].[Spokane].[Grace McLaughlin]\n"
            + "[Customers].[USA].[WA].[Spokane].[Matt Bellah]");
    }
    // TopCount of an empty input set is the empty set.
    public void testTopCountEmpty() {
        assertAxisReturns(
            "TopCount(Filter({[Promotion Media].[Media Type].members}, 1=0), 2, [Measures].[Unit Sales])",
            "");
    }
    // Hierarchy-dependency contract shared by all six Top/Bottom functions;
    // the actual assertions live in checkTopBottomCountPercentDepends.
    public void testTopCountDepends() {
        checkTopBottomCountPercentDepends("TopCount");
        checkTopBottomCountPercentDepends("TopPercent");
        checkTopBottomCountPercentDepends("TopSum");
        checkTopBottomCountPercentDepends("BottomCount");
        checkTopBottomCountPercentDepends("BottomPercent");
        checkTopBottomCountPercentDepends("BottomSum");
    }
private void checkTopBottomCountPercentDepends(String fun) {
String s1 =
TestContext.allHiersExcept("[Measures]", "[Promotion Media]");
getTestContext().assertSetExprDependsOn(
fun
+ "({[Promotion Media].[Media Type].members}, "
+ "2, [Measures].[Unit Sales])",
s1);
if (fun.endsWith("Count")) {
getTestContext().assertSetExprDependsOn(
fun + "({[Promotion Media].[Media Type].members}, 2)",
"{}");
}
}
    /**
     * Tests TopCount applied to a large result set.
     *
     * <p>Before optimizing (see FunUtil.partialSort), on a 2-core 32-bit 2.4GHz
     * machine, the 1st query took 14.5 secs, the 2nd query took 5.0 secs.
     * After optimizing, who knows?
     *
     * <p>The query is run twice so the second run measures warm-cache
     * performance; timings are logged, not asserted.
     */
    public void testTopCountHuge() {
        // TODO convert printfs to trace
        final String query =
            "SELECT [Measures].[Store Sales] ON 0,\n"
            + "TopCount([Time].[Month].members * "
            + "[Customers].[Name].members, 3, [Measures].[Store Sales]) ON 1\n"
            + "FROM [Sales]";
        final String desiredResult =
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1].[3], [Customers].[USA].[WA].[Spokane].[George Todero]}\n"
            + "{[Time].[1997].[Q3].[7], [Customers].[USA].[WA].[Spokane].[James Horvat]}\n"
            + "{[Time].[1997].[Q4].[11], [Customers].[USA].[WA].[Olympia].[Charles Stanley]}\n"
            + "Row #0: 234.83\n"
            + "Row #1: 199.46\n"
            + "Row #2: 191.90\n";
        long now = System.currentTimeMillis();
        assertQueryReturns(query, desiredResult);
        LOGGER.info("first query took " + (System.currentTimeMillis() - now));
        now = System.currentTimeMillis();
        assertQueryReturns(query, desiredResult);
        LOGGER.info("second query took " + (System.currentTimeMillis() - now));
    }
    // TopPercent(set, pct, measure): smallest leading subset whose measure
    // total reaches pct% of the set's total.
    public void testTopPercent() {
        assertAxisReturns(
            "TopPercent({[Promotion Media].[Media Type].members}, 70, [Measures].[Unit Sales])",
            "[Promotion Media].[No Media]");
    }
    // todo: test precision
    // TopSum(set, value, measure): smallest leading subset whose measure
    // total reaches the given absolute value.
    public void testTopSum() {
        assertAxisReturns(
            "TopSum({[Promotion Media].[Media Type].members}, 200000, [Measures].[Unit Sales])",
            "[Promotion Media].[No Media]\n"
            + "[Promotion Media].[Daily Paper, Radio, TV]");
    }
    // TopSum of an empty input set is the empty set.
    public void testTopSumEmpty() {
        assertAxisReturns(
            "TopSum(Filter({[Promotion Media].[Media Type].members}, 1=0), "
            + "200000, [Measures].[Unit Sales])",
            "");
    }
    // Union(..., ALL) keeps duplicates and preserves input order.
    public void testUnionAll() {
        assertAxisReturns(
            "Union({[Gender].[M]}, {[Gender].[F]}, ALL)",
            "[Gender].[M]\n"
            + "[Gender].[F]"); // order is preserved
    }
    // Regression test: Union ALL of two tuple sets must simply concatenate
    // them, not repeat the second operand's rows.
    public void testUnionAllTuple() {
        // With the bug, the last 8 rows are repeated.
        assertQueryReturns(
            "with \n"
            + "set [Set1] as 'Crossjoin({[Time].[1997].[Q1]:[Time].[1997].[Q4]},{[Store].[USA].[CA]:[Store].[USA].[OR]})'\n"
            + "set [Set2] as 'Crossjoin({[Time].[1997].[Q2]:[Time].[1997].[Q3]},{[Store].[Mexico].[DF]:[Store].[Mexico].[Veracruz]})'\n"
            + "select \n"
            + "{[Measures].[Unit Sales]} ON COLUMNS,\n"
            + "Union([Set1], [Set2], ALL) ON ROWS\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q1], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q2], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q2], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q3], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q3], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q4], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q4], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[DF]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Guerrero]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Jalisco]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Veracruz]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[DF]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Guerrero]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Jalisco]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Veracruz]}\n"
            + "Row #0: 16,890\n"
            + "Row #1: 19,287\n"
            + "Row #2: 18,052\n"
            + "Row #3: 15,079\n"
            + "Row #4: 18,370\n"
            + "Row #5: 16,940\n"
            + "Row #6: 21,436\n"
            + "Row #7: 16,353\n"
            // Mexico rows have no Sales data, hence empty cells.
            + "Row #8: \n"
            + "Row #9: \n"
            + "Row #10: \n"
            + "Row #11: \n"
            + "Row #12: \n"
            + "Row #13: \n"
            + "Row #14: \n"
            + "Row #15: \n");
    }
    // Default Union removes duplicates (within and across operands), keeping
    // the first occurrence's position.
    public void testUnion() {
        assertAxisReturns(
            "Union({[Store].[USA], [Store].[USA], [Store].[USA].[OR]}, "
            + "{[Store].[USA].[CA], [Store].[USA]})",
            "[Store].[USA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[CA]");
    }
    // Union of two empty sets is empty.
    public void testUnionEmptyBoth() {
        assertAxisReturns(
            "Union({}, {})",
            "");
    }
    // Union with an empty right operand returns the left operand.
    public void testUnionEmptyRight() {
        assertAxisReturns(
            "Union({[Gender].[M]}, {})",
            "[Gender].[M]");
    }
    // Duplicate-elimination also applies to tuples: the repeated
    // (M, S) tuple appears only once.
    public void testUnionTuple() {
        assertAxisReturns(
            "Union({"
            + " ([Gender].[M], [Marital Status].[S]),"
            + " ([Gender].[F], [Marital Status].[S])"
            + "}, {"
            + " ([Gender].[M], [Marital Status].[M]),"
            + " ([Gender].[M], [Marital Status].[S])"
            + "})",
            "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}");
    }
    // The explicit DISTINCT flag behaves identically to the default.
    public void testUnionTupleDistinct() {
        assertAxisReturns(
            "Union({"
            + " ([Gender].[M], [Marital Status].[S]),"
            + " ([Gender].[F], [Marital Status].[S])"
            + "}, {"
            + " ([Gender].[M], [Marital Status].[M]),"
            + " ([Gender].[M], [Marital Status].[S])"
            + "}, Distinct)",
            "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}");
    }
public void testUnionQuery() {
Result result = executeQuery(
"select {[Measures].[Unit Sales], "
+ "[Measures].[Store Cost], "
+ "[Measures].[Store Sales]} on columns,\n"
+ " Hierarchize(\n"
+ " Union(\n"
+ " Crossjoin(\n"
+ " Crossjoin([Gender].[All Gender].children,\n"
+ " [Marital Status].[All Marital Status].children),\n"
+ " Crossjoin([Customers].[All Customers].children,\n"
+ " [Product].[All Products].children) ),\n"
+ " Crossjoin({([Gender].[All Gender].[M], [Marital Status].[All Marital Status].[M])},\n"
+ " Crossjoin(\n"
+ " [Customers].[All Customers].[USA].children,\n"
+ " [Product].[All Products].children) ) )) on rows\n"
+ "from Sales where ([Time].[1997])");
final Axis rowsAxis = result.getAxes()[1];
Assert.assertEquals(45, rowsAxis.getPositions().size());
}
    // Set.Item(i): chained Item calls drill from position to member;
    // out-of-range indices (too large or negative) yield the null member.
    public void testItemMember() {
        assertExprReturns(
            "Descendants([Time].[1997], [Time].[Month]).Item(1).Item(0).UniqueName",
            "[Time].[1997].[Q1].[2]");
        // Access beyond the list yields the Null member.
        if (isDefaultNullMemberRepresentation()) {
            assertExprReturns(
                "[Time].[1997].Children.Item(6).UniqueName", "[Time].[#null]");
            assertExprReturns(
                "[Time].[1997].Children.Item(-1).UniqueName", "[Time].[#null]");
        }
    }
    // Item on a tuple set: first Item picks the tuple, second Item picks a
    // member within it (index 1 = the [Time] member of the tuple).
    public void testItemTuple() {
        assertExprReturns(
            "CrossJoin([Gender].[All Gender].children, "
            + "[Time].[1997].[Q2].children).Item(0).Item(1).UniqueName",
            "[Time].[1997].[Q2].[4]");
    }
public void testStrToMember() {
assertExprReturns(
"StrToMember(\"[Time].[1997].[Q2].[4]\").Name",
"4");
}
public void testStrToMemberUniqueName() {
assertExprReturns(
"StrToMember(\"[Store].[USA].[CA]\").Name",
"CA");
}
public void testStrToMemberFullyQualifiedName() {
assertExprReturns(
"StrToMember(\"[Store].[All Stores].[USA].[CA]\").Name",
"CA");
}
    // Passing NULL to StrToMember/StrToSet/StrToTuple raises the same
    // "empty expression" error that SSAS 2005 produces.
    public void testStrToMemberNull() {
        // SSAS 2005 gives "#Error An MDX expression was expected. An empty
        // expression was specified."
        assertExprThrows(
            "StrToMember(null).Name",
            "An MDX expression was expected. An empty expression was specified");
        assertExprThrows(
            "StrToSet(null, [Gender]).Count",
            "An MDX expression was expected. An empty expression was specified");
        assertExprThrows(
            "StrToTuple(null, [Gender]).Name",
            "An MDX expression was expected. An empty expression was specified");
    }
    /**
     * Testcase for
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-560">
     * bug MONDRIAN-560, "StrToMember function doesn't use IgnoreInvalidMembers
     * option"</a>.
     *
     * <p>With IgnoreInvalidMembersDuringQuery=true an unresolvable member
     * becomes the null member (and is dropped from sets); unknown
     * hierarchies still throw. With the property off, the same queries
     * throw "member not found".
     */
    public void testStrToMemberIgnoreInvalidMembers() {
        final MondrianProperties properties = MondrianProperties.instance();
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);
        // [Product].[Drugs] is invalid, becomes null member, and is dropped
        // from list
        assertQueryReturns(
            "select \n"
            + "  {[Product].[Food],\n"
            + "   StrToMember(\"[Product].[Drugs]\")} on columns,\n"
            + "  {[Measures].[Unit Sales]} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[Food]}\n"
            + "Axis #2:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Row #0: 191,940\n");
        // Hierarchy is inferred from leading edge
        assertExprReturns(
            "StrToMember(\"[Marital Status].[Separated]\").Hierarchy.Name",
            "Marital Status");
        // Null member is returned
        assertExprReturns(
            "StrToMember(\"[Marital Status].[Separated]\").Name",
            "#null");
        // Use longest valid prefix, so get [Time].[Weekly] rather than just
        // [Time].
        final String timeWeekly = TestContext.hierarchyName("Time", "Weekly");
        assertExprReturns(
            "StrToMember(\"" + timeWeekly
            + ".[1996].[Q1]\").Hierarchy.UniqueName",
            timeWeekly);
        // If hierarchy is invalid, throw an error even though
        // IgnoreInvalidMembersDuringQuery is set.
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy].[Invalid].[Member]\").Name",
            "MDX object '[Unknown Hierarchy].[Invalid].[Member]' not found in cube 'Sales'");
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy].[Invalid]\").Name",
            "MDX object '[Unknown Hierarchy].[Invalid]' not found in cube 'Sales'");
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy]\").Name",
            "MDX object '[Unknown Hierarchy]' not found in cube 'Sales'");
        assertAxisThrows(
            "StrToMember(\"\")",
            "MDX object '' not found in cube 'Sales'");
        // Now disable the property: the same invalid member is a hard error.
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, false);
        assertQueryThrows(
            "select \n"
            + "  {[Product].[Food],\n"
            + "   StrToMember(\"[Product].[Drugs]\")} on columns,\n"
            + "  {[Measures].[Unit Sales]} on rows\n"
            + "from [Sales]",
            "Member '[Product].[Drugs]' not found");
        assertExprThrows(
            "StrToMember(\"[Marital Status].[Separated]\").Hierarchy.Name",
            "Member '[Marital Status].[Separated]' not found");
    }
    // StrToTuple with one hierarchy returns a member; with several, a tuple.
    public void testStrToTuple() {
        // single dimension yields member
        assertAxisReturns(
            "{StrToTuple(\"[Time].[1997].[Q2]\", [Time])}",
            "[Time].[1997].[Q2]");
        // multiple dimensions yield tuple
        assertAxisReturns(
            "{StrToTuple(\"([Gender].[F], [Time].[1997].[Q2])\", [Gender], [Time])}",
            "{[Gender].[F], [Time].[1997].[Q2]}");
        // todo: test for garbage at end of string
    }
    // With IgnoreInvalidMembersDuringQuery, one invalid member nulls out the
    // whole tuple.
    public void testStrToTupleIgnoreInvalidMembers() {
        final MondrianProperties properties = MondrianProperties.instance();
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);
        // If any member is invalid, the whole tuple is null.
        assertAxisReturns(
            "StrToTuple(\"([Gender].[M], [Marital Status].[Separated])\","
            + " [Gender], [Marital Status])",
            "");
    }
    // A tuple may contain at most one member per hierarchy; duplicates are
    // rejected with an error.
    public void testStrToTupleDuHierarchiesFails() {
        assertAxisThrows(
            "{StrToTuple(\"([Gender].[F], [Time].[1997].[Q2], [Gender].[M])\", [Gender], [Time], [Gender])}",
            "Tuple contains more than one member of hierarchy '[Gender]'.");
    }
    // Two hierarchies of the same dimension ([Time] and [Time].[Weekly]) are
    // allowed together; only a repeated hierarchy ([Gender] twice) fails.
    public void testStrToTupleDupHierInSameDimensions() {
        assertAxisThrows(
            "{StrToTuple("
            + "\"([Gender].[F], "
            + "[Time].[1997].[Q2], "
            + "[Time].[Weekly].[1997].[10])\","
            + " [Gender], "
            + TestContext.hierarchyName("Time", "Weekly")
            + ", [Gender])}",
            "Tuple contains more than one member of hierarchy '[Gender]'.");
    }
    // As a member expression, StrToTuple depends on no hierarchies; once
    // converted to a scalar it depends on all hierarchies it does not set.
    public void testStrToTupleDepends() {
        getTestContext().assertMemberExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2]\", [Time])",
            "{}");
        // converted to scalar, depends set is larger
        getTestContext().assertExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2]\", [Time])",
            TestContext.allHiersExcept("[Time]"));
        getTestContext().assertMemberExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2], [Gender].[F]\", [Time], [Gender])",
            "{}");
        getTestContext().assertExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2], [Gender].[F]\", [Time], [Gender])",
            TestContext.allHiersExcept("[Time]", "[Gender]"));
    }
    /**
     * Tests StrToSet parsing: member sets, wrong-hierarchy errors,
     * tolerance of extra whitespace, tuple sets, and resolution of both
     * bracketed and unbracketed segment names.
     */
    public void testStrToSet() {
        // TODO: handle text after '}'
        // TODO: handle string which ends too soon
        // TODO: handle spaces before first '{'
        // TODO: test spaces before unbracketed names,
        // e.g. "{Gender. M, Gender. F }".
        assertAxisReturns(
            "StrToSet("
            + " \"{[Gender].[F], [Gender].[M]}\","
            + " [Gender])",
            "[Gender].[F]\n"
            + "[Gender].[M]");
        // A member from a different hierarchy than declared is an error.
        assertAxisThrows(
            "StrToSet("
            + " \"{[Gender].[F], [Time].[1997]}\","
            + " [Gender])",
            "member is of wrong hierarchy");
        // whitespace ok
        assertAxisReturns(
            "StrToSet("
            + " \" { [Gender] . [F] ,[Gender].[M] } \","
            + " [Gender])",
            "[Gender].[F]\n"
            + "[Gender].[M]");
        // tuples
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " ([Gender].[F], [Time].[1997].[Q2]), "
            + " ([Gender].[M], [Time].[1997])"
            + "}"
            + "\","
            + " [Gender],"
            + " [Time])",
            "{[Gender].[F], [Time].[1997].[Q2]}\n"
            + "{[Gender].[M], [Time].[1997]}");
        // matches unique name
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " [Store].[USA].[CA], "
            + " [Store].[All Stores].[USA].OR,"
            + " [Store].[All Stores]. [USA] . [WA]"
            + "}"
            + "\","
            + " [Store])",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
    }
/**
 * Asserts that StrToSet rejects tuples in which the same hierarchy
 * ([Gender]) appears more than once.
 */
public void testStrToSetDupDimensionsFails() {
    assertAxisThrows(
        "StrToSet("
        + "\""
        + "{"
        + " ([Gender].[F], [Time].[1997].[Q2], [Gender].[F]), "
        + " ([Gender].[M], [Time].[1997], [Gender].[F])"
        + "}"
        + "\","
        + " [Gender],"
        + " [Time],"
        + " [Gender])",
        "Tuple contains more than one member of hierarchy '[Gender]'.");
}
/**
 * With IgnoreInvalidMembersDuringQuery enabled, StrToSet silently drops
 * members (and whole tuples) that do not resolve, keeping the rest.
 * Relies on propSaver to restore the property after the test.
 */
public void testStrToSetIgnoreInvalidMembers() {
    final MondrianProperties properties = MondrianProperties.instance();
    propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);
    // invalid members are dropped from a member set
    assertAxisReturns(
        "StrToSet("
        + "\""
        + "{"
        + " [Product].[Food],"
        + " [Product].[Food].[You wouldn't like],"
        + " [Product].[Drink].[You would like],"
        + " [Product].[Drink].[Dairy]"
        + "}"
        + "\","
        + " [Product])",
        "[Product].[Food]\n"
        + "[Product].[Drink].[Dairy]");
    // a tuple containing an invalid member is dropped entirely
    assertAxisReturns(
        "StrToSet("
        + "\""
        + "{"
        + " ([Gender].[M], [Product].[Food]),"
        + " ([Gender].[F], [Product].[Food].[You wouldn't like]),"
        + " ([Gender].[M], [Product].[Drink].[You would like]),"
        + " ([Gender].[F], [Product].[Drink].[Dairy])"
        + "}"
        + "\","
        + " [Gender], [Product])",
        "{[Gender].[M], [Product].[Food]}\n"
        + "{[Gender].[F], [Product].[Drink].[Dairy]}");
}
/**
 * Tests the Ytd (year-to-date) function: the zero-argument form, the
 * one-argument form at quarter and month level, the error for a
 * non-Time argument, and the deduced dependency sets.
 */
public void testYtd() {
    assertAxisReturns(
        "Ytd()",
        "[Time].[1997]");
    assertAxisReturns(
        "Ytd([Time].[1997].[Q3])",
        "[Time].[1997].[Q1]\n"
        + "[Time].[1997].[Q2]\n"
        + "[Time].[1997].[Q3]");
    assertAxisReturns(
        "Ytd([Time].[1997].[Q2].[4])",
        "[Time].[1997].[Q1].[1]\n"
        + "[Time].[1997].[Q1].[2]\n"
        + "[Time].[1997].[Q1].[3]\n"
        + "[Time].[1997].[Q2].[4]");
    assertAxisThrows(
        "Ytd([Store])",
        "Argument to function 'Ytd' must belong to Time hierarchy");
    // zero-arg form depends on the Time hierarchies; explicit-arg form
    // depends on nothing
    getTestContext().assertSetExprDependsOn(
        "Ytd()",
        "{[Time], " + TimeWeekly + "}");
    getTestContext().assertSetExprDependsOn(
        "Ytd([Time].[1997].[Q2])",
        "{}");
}
/**
 * Testcase for
 * <a href="http://jira.pentaho.com/browse/MONDRIAN-458">
 * bug MONDRIAN-458, "error deducing type of Ytd/Qtd/Mtd functions within
 * Generate"</a>.
 *
 * <p>Verifies Ytd/Qtd/Mtd inside Generate, both with duplicates
 * eliminated (default) and retained (ALL keyword).
 */
public void testGeneratePlusXtd() {
    assertAxisReturns(
        "generate(\n"
        + "  {[Time].[1997].[Q1].[2], [Time].[1997].[Q3].[7]},\n"
        + " {Ytd( [Time].[Time].currentMember)})",
        "[Time].[1997].[Q1].[1]\n"
        + "[Time].[1997].[Q1].[2]\n"
        + "[Time].[1997].[Q1].[3]\n"
        + "[Time].[1997].[Q2].[4]\n"
        + "[Time].[1997].[Q2].[5]\n"
        + "[Time].[1997].[Q2].[6]\n"
        + "[Time].[1997].[Q3].[7]")
        ;
    // with ALL, overlapping Ytd ranges retain their duplicates
    assertAxisReturns(
        "generate(\n"
        + "  {[Time].[1997].[Q1].[2], [Time].[1997].[Q3].[7]},\n"
        + " {Ytd( [Time].[Time].currentMember)}, ALL)",
        "[Time].[1997].[Q1].[1]\n"
        + "[Time].[1997].[Q1].[2]\n"
        + "[Time].[1997].[Q1].[1]\n"
        + "[Time].[1997].[Q1].[2]\n"
        + "[Time].[1997].[Q1].[3]\n"
        + "[Time].[1997].[Q2].[4]\n"
        + "[Time].[1997].[Q2].[5]\n"
        + "[Time].[1997].[Q2].[6]\n"
        + "[Time].[1997].[Q3].[7]");
    assertExprReturns(
        "count(generate({[Time].[1997].[Q4].[11]},"
        + " {Qtd( [Time].[Time].currentMember)}))",
        2, 0);
    assertExprReturns(
        "count(generate({[Time].[1997].[Q4].[11]},"
        + " {Mtd( [Time].[Time].currentMember)}))",
        1, 0);
}
/**
 * Tests the Qtd (quarter-to-date) function: zero-arg form evaluated in a
 * slicer context, and one-arg form for a month, a quarter, and a year
 * (a year yields the empty set), plus the non-Time-argument error.
 */
public void testQtd() {
    // zero args
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(Qtd()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");
    // one arg, a month
    assertAxisReturns(
        "Qtd([Time].[1997].[Q2].[5])",
        "[Time].[1997].[Q2].[4]\n"
        + "[Time].[1997].[Q2].[5]");
    // one arg, a quarter
    assertAxisReturns(
        "Qtd([Time].[1997].[Q2])",
        "[Time].[1997].[Q2]");
    // one arg, a year
    assertAxisReturns(
        "Qtd([Time].[1997])",
        "");
    assertAxisThrows(
        "Qtd([Store])",
        "Argument to function 'Qtd' must belong to Time hierarchy");
}
/**
 * Tests the Mtd (month-to-date) function: zero-arg form in a slicer
 * context; one-arg form for a month (returns the month itself), a
 * quarter and a year (both empty); and the non-Time-argument error.
 */
public void testMtd() {
    // zero args
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(Mtd()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[5]}\n");
    // one arg, a month
    assertAxisReturns(
        "Mtd([Time].[1997].[Q2].[5])",
        "[Time].[1997].[Q2].[5]");
    // one arg, a quarter
    assertAxisReturns(
        "Mtd([Time].[1997].[Q2])",
        "");
    // one arg, a year
    assertAxisReturns(
        "Mtd([Time].[1997])",
        "");
    assertAxisThrows(
        "Mtd([Store])",
        "Argument to function 'Mtd' must belong to Time hierarchy");
}
/**
 * Tests PeriodsToDate in its zero-, one- and two-argument forms,
 * including dependency deduction, equivalence with a manual
 * TopCount/Descendants/Ancestor formulation, evaluation at the top
 * level, the MONDRIAN regression for bug 1598379, and application to a
 * non-Time hierarchy.
 */
public void testPeriodsToDate() {
    getTestContext().assertSetExprDependsOn("PeriodsToDate()", "{[Time]}");
    getTestContext().assertSetExprDependsOn(
        "PeriodsToDate([Time].[Year])",
        "{[Time]}");
    getTestContext().assertSetExprDependsOn(
        "PeriodsToDate([Time].[Year], [Time].[1997].[Q2].[5])", "{}");
    // two args
    assertAxisReturns(
        "PeriodsToDate([Time].[Quarter], [Time].[1997].[Q2].[5])",
        "[Time].[1997].[Q2].[4]\n" + "[Time].[1997].[Q2].[5]");
    // equivalent to above
    assertAxisReturns(
        "TopCount("
        + "  Descendants("
        + "    Ancestor("
        + "      [Time].[1997].[Q2].[5], [Time].[Quarter]),"
        + "    [Time].[1997].[Q2].[5].Level),"
        + "  1).Item(0) : [Time].[1997].[Q2].[5]",
        "[Time].[1997].[Q2].[4]\n" + "[Time].[1997].[Q2].[5]");
    // one arg
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate([Time].[Quarter])) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");
    // zero args
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");
    // zero args, evaluated at a member which is at the top level.
    // The default level is the level above the current member -- so
    // choosing a member at the highest level might trip up the
    // implementation.
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997]",
        "Axis #0:\n"
        + "{[Time].[1997]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {}\n");
    // Testcase for bug 1598379, which caused NPE because the args[0].type
    // knew its dimension but not its hierarchy.
    assertQueryReturns(
        "with member [Measures].[Position] as\n"
        + " 'Sum("
        + "PeriodsToDate([Time].[Time].Levels(0),"
        + " [Time].[Time].CurrentMember), "
        + "[Measures].[Store Sales])'\n"
        + "select {[Time].[1997],\n"
        + " [Time].[1997].[Q1],\n"
        + " [Time].[1997].[Q1].[1],\n"
        + " [Time].[1997].[Q1].[2],\n"
        + " [Time].[1997].[Q1].[3]} ON COLUMNS,\n"
        + "{[Measures].[Store Sales], [Measures].[Position] } ON ROWS\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Store Sales]}\n"
        + "{[Measures].[Position]}\n"
        + "Row #0: 565,238.13\n"
        + "Row #0: 139,628.35\n"
        + "Row #0: 45,539.69\n"
        + "Row #0: 44,058.79\n"
        + "Row #0: 50,029.87\n"
        + "Row #1: 565,238.13\n"
        + "Row #1: 139,628.35\n"
        + "Row #1: 45,539.69\n"
        + "Row #1: 89,598.48\n"
        + "Row #1: 139,628.35\n");
    // PeriodsToDate can also be applied to a non-Time hierarchy
    assertQueryReturns(
        "select\n"
        + "{[Measures].[Unit Sales]} on columns,\n"
        + "periodstodate(\n"
        + "  [Product].[Product Category],\n"
        + "  [Product].[Food].[Baked Goods].[Bread].[Muffins]) on rows\n"
        + "from [Sales]\n"
        + "",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 815\n"
        + "Row #1: 3,497\n"
        + "");
    // TODO: enable
    if (false) {
        assertExprThrows(
            "Sum(PeriodsToDate([Time.Weekly].[Year], [Time].CurrentMember), [Measures].[Unit Sales])",
            "wrong dimension");
    }
}
/**
 * Tests SetToStr applied to a member set and to a set of tuples; the
 * result is the canonical braced string form.
 */
public void testSetToStr() {
    assertExprReturns(
        "SetToStr([Time].[Time].children)",
        "{[Time].[1997].[Q1], [Time].[1997].[Q2], [Time].[1997].[Q3], [Time].[1997].[Q4]}");
    // Now, applied to tuples
    assertExprReturns(
        "SetToStr({CrossJoin([Marital Status].children, {[Gender].[M]})})",
        "{"
        + "([Marital Status].[M], [Gender].[M]), "
        + "([Marital Status].[S], [Gender].[M])"
        + "}");
}
/**
 * Tests TupleToStr applied to a dimension, a hierarchy, a member, a
 * parenthesized member, a real tuple, and null members (which yield the
 * empty string). Behavior for a multi-hierarchy dimension depends on
 * the SsasCompatibleNaming property.
 */
public void testTupleToStr() {
    // Applied to a dimension (which becomes a member)
    assertExprReturns(
        "TupleToStr([Product])",
        "[Product].[All Products]");
    // Applied to a dimension (invalid because has no default hierarchy)
    if (MondrianProperties.instance().SsasCompatibleNaming.get()) {
        assertExprThrows(
            "TupleToStr([Time])",
            "The 'Time' dimension contains more than one hierarchy, "
            + "therefore the hierarchy must be explicitly specified.");
    } else {
        assertExprReturns(
            "TupleToStr([Time])",
            "[Time].[1997]");
    }
    // Applied to a hierarchy
    assertExprReturns(
        "TupleToStr([Time].[Time])",
        "[Time].[1997]");
    // Applied to a member
    assertExprReturns(
        "TupleToStr([Store].[USA].[OR])",
        "[Store].[USA].[OR]");
    // Applied to a member (extra set of parens)
    assertExprReturns(
        "TupleToStr(([Store].[USA].[OR]))",
        "[Store].[USA].[OR]");
    // Now, applied to a tuple
    assertExprReturns(
        "TupleToStr(([Marital Status], [Gender].[M]))",
        "([Marital Status].[All Marital Status], [Gender].[M])");
    // Applied to a tuple containing a null member
    assertExprReturns(
        "TupleToStr(([Marital Status], [Gender].Parent))",
        "");
    // Applied to a null member
    assertExprReturns(
        "TupleToStr([Marital Status].Parent)",
        "");
}
/**
 * Executes a scalar MDX expression and asserts that the string result
 * matches {@code expected} exactly. For example,
 * <code>assertExprReturns("1 + 2", "3")</code> should succeed.
 *
 * @param expr     MDX scalar expression
 * @param expected Expected string result
 */
public void assertExprReturns(String expr, String expected) {
    assertEquals(expected, executeExpr(expr));
}
/**
 * Executes a scalar expression, and asserts that the numeric result is
 * within {@code delta} of the expected value. If both the expected and
 * actual values are NaN, the assertion succeeds.
 *
 * @param expr     MDX scalar expression
 * @param expected Expected value
 * @param delta    Maximum allowed deviation from expected value
 */
public void assertExprReturns(
    String expr, double expected, double delta)
{
    Object value = getTestContext().executeExprRaw(expr).getValue();
    // Check the type explicitly rather than catching ClassCastException:
    // the original catch-based code threw an uninformative
    // NullPointerException (not CCE) when the expression evaluated to
    // null. Both null and non-numeric values now produce a readable
    // ComparisonFailure.
    if (!(value instanceof Number)) {
        String msg = "Actual value \"" + value + "\" is not a number.";
        throw new ComparisonFailure(
            msg, Double.toString(expected), String.valueOf(value));
    }
    double actual = ((Number) value).doubleValue();
    if (Double.isNaN(expected) && Double.isNaN(actual)) {
        return;
    }
    Assert.assertEquals(
        null,
        expected,
        actual,
        delta);
}
/**
 * Compiles a scalar expression, and asserts that the compiled program
 * matches {@code expectedCalc}. Skipped entirely when expression
 * dependency testing is enabled, because the compiled code then
 * contains extra 'DependencyTestingCalc' instances.
 *
 * @param expr         MDX scalar expression
 * @param expectedCalc Expected textual form of the compiled program
 */
public void assertExprCompilesTo(
    String expr,
    String expectedCalc)
{
    // Check the skip condition first: the original compiled the
    // expression and then discarded the result, wasting the work.
    final int expDeps =
        MondrianProperties.instance().TestExpDependencies.get();
    if (expDeps > 0) {
        return;
    }
    final String actualCalc =
        getTestContext().compileExpression(expr, true);
    TestContext.assertEqualsVerbose(expectedCalc, actualCalc);
}
/**
 * Compiles a set expression, and asserts that the compiled program
 * matches {@code expectedCalc}. Skipped entirely when expression
 * dependency testing is enabled, because the compiled code then
 * contains extra 'DependencyTestingCalc' instances.
 *
 * @param expr         MDX set expression
 * @param expectedCalc Expected textual form of the compiled program
 */
public void assertAxisCompilesTo(
    String expr,
    String expectedCalc)
{
    // Check the skip condition first: the original compiled the
    // expression and then discarded the result, wasting the work.
    final int expDeps =
        MondrianProperties.instance().TestExpDependencies.get();
    if (expDeps > 0) {
        return;
    }
    final String actualCalc =
        getTestContext().compileExpression(expr, false);
    TestContext.assertEqualsVerbose(expectedCalc, actualCalc);
}
/**
 * Tests the <code>Rank(member, set)</code> MDX function: 1-based rank
 * for members/tuples found in the set, 0 when absent, empty string for
 * null (or partially null) inputs, and first-occurrence semantics for
 * duplicates.
 */
public void testRank() {
    // Member within set
    assertExprReturns(
        "Rank([Store].[USA].[CA], "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "2");
    // Member not in set
    assertExprReturns(
        "Rank([Store].[USA].[WA], "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "0");
    // Member not in empty set
    assertExprReturns(
        "Rank([Store].[USA].[WA], {})", "0");
    // Null member not in set returns null.
    assertExprReturns(
        "Rank([Store].Parent, "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "");
    // Null member in empty set. (MSAS returns an error "Formula error -
    // dimension count is not valid - in the Rank function" but I think
    // null is the correct behavior.)
    assertExprReturns(
        "Rank([Gender].Parent, {})", "");
    // Member occurs twice in set -- pick first
    assertExprReturns(
        "Rank([Store].[USA].[WA], \n"
        + "{[Store].[USA].[WA],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA],"
        + " [Store].[USA].[WA]})", "1");
    // Tuple not in set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[M])})", "0");
    // Tuple in set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[F], [Marital Status].[M])})", "3");
    // Tuple not in empty set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n" + "{})", "0");
    // Partially null tuple in set, returns null
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].Parent), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[F], [Marital Status].[M])})", "");
}
/**
 * Tests the 3-argument Rank(member, set, expr) form against the
 * 2-argument form over an ordered set, checking tie-handling.
 */
public void testRankWithExpr() {
    // Note that [Good] and [Top Measure] have the same [Unit Sales]
    // value (5), but [Good] ranks 1 and [Top Measure] ranks 2. Even though
    // they are sorted descending on unit sales, they remain in their
    // natural order (member name) because MDX sorts are stable.
    assertQueryReturns(
        "with member [Measures].[Sibling Rank] as ' Rank([Product].CurrentMember, [Product].CurrentMember.Siblings) '\n"
        + "  member [Measures].[Sales Rank] as ' Rank([Product].CurrentMember, Order([Product].Parent.Children, [Measures].[Unit Sales], DESC)) '\n"
        + "  member [Measures].[Sales Rank2] as ' Rank([Product].CurrentMember, [Product].Parent.Children, [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank], [Measures].[Sales Rank2]} on columns,\n"
        + " {[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children} on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "{[Measures].[Sales Rank2]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Pearl]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #0: 1\n"
        + "Row #1: \n"
        + "Row #1: 5\n"
        + "Row #1: 5\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #3: 5\n"
        + "Row #3: 2\n"
        + "Row #3: 1\n"
        + "Row #4: 3\n"
        + "Row #4: 4\n"
        + "Row #4: 3\n");
}
/**
 * Tests that the 3-argument Rank over a member set assigns the same
 * rank to members whose sort-expression values tie.
 */
public void testRankMembersWithTiedExpr() {
    assertQueryReturns(
        "with "
        + " Set [Beers] as {[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children} "
        + " member [Measures].[Sales Rank] as ' Rank([Product].CurrentMember, [Beers], [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank]} on columns,\n"
        + " Generate([Beers], {[Product].CurrentMember}) on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Pearl]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #1: \n"
        + "Row #1: 5\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #3: 5\n"
        + "Row #3: 1\n"
        + "Row #4: 3\n"
        + "Row #4: 3\n");
}
/**
 * As {@code testRankMembersWithTiedExpr}, but ranking tuples (product
 * crossjoined with a single store) rather than members.
 */
public void testRankTuplesWithTiedExpr() {
    assertQueryReturns(
        "with "
        + " Set [Beers for Store] as 'NonEmptyCrossJoin("
        + "[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children, "
        + "{[Store].[USA].[OR].[Portland].[Store 11]})' "
        + " member [Measures].[Sales Rank] as ' Rank(([Product].CurrentMember,[Store].CurrentMember), [Beers for Store], [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank]} on columns,\n"
        + " Generate([Beers for Store], {([Product].CurrentMember, [Store].CurrentMember)}) on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #1: 3\n"
        + "Row #1: 3\n"
        + "Row #2: 5\n"
        + "Row #2: 1\n"
        + "Row #3: 3\n"
        + "Row #3: 3\n");
}
/**
 * More cases for the 3-argument Rank(member, set, expr) form: ranking
 * by [Unit Sales], null member input, empty sets, members not in the
 * set (ranked by value position), and expressions that evaluate to
 * null for some or all set elements.
 */
public void testRankWithExpr2() {
    // Data: Unit Sales
    // All gender 266,733
    // F          131,558
    // M          135,215
    assertExprReturns(
        "Rank([Gender].[All Gender],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "1");
    assertExprReturns(
        "Rank([Gender].[F],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "3");
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "2");
    // Null member. Expression evaluates to null, therefore value does
    // not appear in the list of values, therefore the rank is null.
    assertExprReturns(
        "Rank([Gender].[All Gender].Parent,"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "");
    // Empty set. Value would appear after all elements in the empty set,
    // therefore rank is 1.
    // Note that SSAS gives error 'The first argument to the Rank function,
    // a tuple expression, should reference the same hierarchies as the
    // second argument, a set expression'. I think that's because it can't
    // deduce a type for '{}'. SSAS's problem, not Mondrian's. :)
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {},"
        + " [Measures].[Unit Sales])",
        "1");
    // As above, but SSAS can type-check this.
    assertExprReturns(
        "Rank([Gender].[M],"
        + " Filter(Gender.Members, 1 = 0),"
        + " [Measures].[Unit Sales])",
        "1");
    // Member is not in set
    assertExprReturns(
        "Rank([Gender].[M]," + " {[Gender].[All Gender], [Gender].[F]})",
        "0");
    // Even though M is not in the set, its value lies between [All Gender]
    // and [F].
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].[All Gender], [Gender].[F]},"
        + " [Measures].[Unit Sales])", "2");
    // Expr evaluates to null for some values of set.
    assertExprReturns(
        "Rank([Product].[Non-Consumable].[Household],"
        + " {[Product].[Food], [Product].[All Products], [Product].[Drink].[Dairy]},"
        + " [Product].CurrentMember.Parent)", "2");
    // Expr evaluates to null for all values in the set.
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].[All Gender], [Gender].[F]},"
        + " [Marital Status].[All Marital Status].Parent)", "1");
}
/**
 * Tests the 3-arg version of the RANK function with a value
 * which returns null within a set of nulls; the expected rank is 1.
 */
public void testRankWithNulls() {
    assertQueryReturns(
        "with member [Measures].[X] as "
        + "'iif([Measures].[Store Sales]=777,"
        + "[Measures].[Store Sales],Null)'\n"
        + "member [Measures].[Y] as 'Rank([Gender].[M],"
        + "{[Measures].[X],[Measures].[X],[Measures].[X]},"
        + " [Marital Status].[All Marital Status].Parent)'"
        + "select {[Measures].[Y]} on columns from Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Y]}\n"
        + "Row #0: 1\n");
}
/**
 * Tests a RANK function which is so large that we need to use caching
 * in order to execute it efficiently. Skipped when expression caching
 * is disabled, since the query would then take too long.
 */
public void testRankHuge() {
    // If caching is disabled, don't even try -- it will take too long.
    if (!MondrianProperties.instance().EnableExpCache.get()) {
        return;
    }
    checkRankHuge(
        "WITH \n"
        + "  MEMBER [Measures].[Rank among products] \n"
        + "    AS ' Rank([Product].CurrentMember, "
        + "            Order([Product].members, "
        + "            [Measures].[Unit Sales], BDESC)) '\n"
        + "SELECT CrossJoin(\n"
        + "  [Gender].members,\n"
        + "  {[Measures].[Unit Sales],\n"
        + "   [Measures].[Rank among products]}) ON COLUMNS,\n"
        // + "  {[Product], [Product].[All Products].[Non-Consumable].
        // [Periodicals].[Magazines].[Sports Magazines].[Robust].
        // [Robust Monthly Sports Magazine]} ON ROWS\n"
        + "  {[Product].members} ON ROWS\n"
        + "FROM [Sales]",
        false);
}
/**
 * As {@link #testRankHuge()}, but for the 3-argument form of the
 * <code>RANK</code> function.
 *
 * <p>Disabled by jhyde, 2006/2/14 (hence the leading underscore, which
 * keeps JUnit from picking it up). Bug 1431316 logged.
 */
public void _testRank3Huge() {
    // If caching is disabled, don't even try -- it will take too long.
    if (!MondrianProperties.instance().EnableExpCache.get()) {
        return;
    }
    checkRankHuge(
        "WITH \n"
        + "  MEMBER [Measures].[Rank among products] \n"
        + "    AS ' Rank([Product].CurrentMember, [Product].members, [Measures].[Unit Sales]) '\n"
        + "SELECT CrossJoin(\n"
        + "  [Gender].members,\n"
        + "  {[Measures].[Unit Sales],\n"
        + "   [Measures].[Rank among products]}) ON COLUMNS,\n"
        + "  {[Product],"
        + "   [Product].[All Products].[Non-Consumable].[Periodicals]"
        + ".[Magazines].[Sports Magazines].[Robust]"
        + ".[Robust Monthly Sports Magazine]} ON ROWS\n"
        // + "  {[Product].members} ON ROWS\n"
        + "FROM [Sales]",
        true);
}
/**
 * Runs a large Rank query and spot-checks a handful of cells: the rank
 * of [All Products], and the unit sales and ranks (overall, female,
 * male) of the last row, [Robust Monthly Sports Magazine].
 *
 * @param query MDX query whose rows axis covers [Product].members
 * @param rank3 whether the query used the 3-argument form of Rank
 *              (which yields slightly different rank values)
 */
private void checkRankHuge(String query, boolean rank3) {
    final Result result = getTestContext().executeQuery(query);
    final Axis rowsAxis = result.getAxes()[1];
    final int rowCount = rowsAxis.getPositions().size();
    assertEquals(2256, rowCount);
    // [All Products], [All Gender], [Rank]
    assertEquals(
        "1", result.getCell(new int[] {1, 0}).getFormattedValue());
    // the last row is [Robust Monthly Sports Magazine]
    final int last = rowCount - 1;
    final Member member = rowsAxis.getPositions().get(last).get(0);
    assertEquals("Robust Monthly Sports Magazine", member.getName());
    // [Robust Monthly Sports Magazine], [All Gender]: sales and rank
    assertEquals(
        "152", result.getCell(new int[] {0, last}).getFormattedValue());
    assertEquals(
        rank3 ? "1,854" : "1,871",
        result.getCell(new int[] {1, last}).getFormattedValue());
    // [Robust Monthly Sports Magazine], [Gender].[F]: sales and rank
    assertEquals(
        "90", result.getCell(new int[] {2, last}).getFormattedValue());
    assertEquals(
        rank3 ? "1,119" : "1,150",
        result.getCell(new int[] {3, last}).getFormattedValue());
    // [Robust Monthly Sports Magazine], [Gender].[M]: sales and rank
    assertEquals(
        "62", result.getCell(new int[] {4, last}).getFormattedValue());
    assertEquals(
        rank3 ? "2,131" : "2,147",
        result.getCell(new int[] {5, last}).getFormattedValue());
}
/**
 * Tests LinRegPoint over the four quarters of 1997, using each
 * quarter's rank as the x value and [Store Sales] as the y value; the
 * query shows the fitted line alongside the actual sales.
 */
public void testLinRegPointQuarter() {
    assertQueryReturns(
        "WITH MEMBER [Measures].[Test] as \n"
        + "  'LinRegPoint(\n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members),\n"
        + "    Descendants([Time].[1997], [Time].[Quarter]), \n"
        + "[Measures].[Store Sales], \n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members))' \n"
        + "SELECT \n"
        + "{[Measures].[Test],[Measures].[Store Sales]} ON ROWS, \n"
        + "{[Time].[1997].Children} ON COLUMNS \n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Test]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Row #0: 134,299.22\n"
        + "Row #0: 138,972.76\n"
        + "Row #0: 143,646.30\n"
        + "Row #0: 148,319.85\n"
        + "Row #1: 139,628.35\n"
        + "Row #1: 132,666.27\n"
        + "Row #1: 140,271.89\n"
        + "Row #1: 152,671.62\n");
}
/**
 * Tests all of the linear regression functions, as suggested by
 * <a href="http://support.microsoft.com/kb/q307276/">a Microsoft knowledge
 * base article</a>.
 *
 * <p>Disabled (leading underscore). NOTE(review): the expected output
 * below includes "-1.#INF" and truncated rows ("..."), so it would not
 * pass as written even if enabled.
 */
public void _testLinRegAll() {
    // We have not implemented the LastPeriods function, so we use
    //   [Time].CurrentMember.Lag(9) : [Time].CurrentMember
    // is equivalent to
    //   LastPeriods(10)
    assertQueryReturns(
        "WITH MEMBER \n"
        + "[Measures].[Intercept] AS \n"
        + "  'LinRegIntercept([Time].CurrentMember.Lag(10) : [Time].CurrentMember, [Measures].[Unit Sales], [Measures].[Store Sales])' \n"
        + "MEMBER [Measures].[Regression Slope] AS\n"
        + "  'LinRegSlope([Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales]) '\n"
        + "MEMBER [Measures].[Predict] AS\n"
        + "  'LinRegPoint([Measures].[Unit Sales],[Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales])',\n"
        + "  FORMAT_STRING = 'Standard' \n"
        + "MEMBER [Measures].[Predict Formula] AS\n"
        + "  '([Measures].[Regression Slope] * [Measures].[Unit Sales]) + [Measures].[Intercept]',\n"
        + "  FORMAT_STRING='Standard'\n"
        + "MEMBER [Measures].[Good Fit] AS\n"
        + "  'LinRegR2([Time].CurrentMember.Lag(9) : [Time].CurrentMember, [Measures].[Unit Sales],[Measures].[Store Sales])',\n"
        + "  FORMAT_STRING='#,#.00'\n"
        + "MEMBER [Measures].[Variance] AS\n"
        + "  'LinRegVariance([Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales])'\n"
        + "SELECT \n"
        + "  {[Measures].[Store Sales], \n"
        + "   [Measures].[Intercept], \n"
        + "   [Measures].[Regression Slope], \n"
        + "   [Measures].[Predict], \n"
        + "   [Measures].[Predict Formula], \n"
        + "   [Measures].[Good Fit], \n"
        + "   [Measures].[Variance] } ON COLUMNS, \n"
        + "  Descendants([Time].[1997], [Time].[Month]) ON ROWS\n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Store Sales]}\n"
        + "{[Measures].[Intercept]}\n"
        + "{[Measures].[Regression Slope]}\n"
        + "{[Measures].[Predict]}\n"
        + "{[Measures].[Predict Formula]}\n"
        + "{[Measures].[Good Fit]}\n"
        + "{[Measures].[Variance]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Row #0: 45,539.69\n"
        + "Row #0: 68711.40\n"
        + "Row #0: -1.033\n"
        + "Row #0: 46,350.26\n"
        + "Row #0: 46.350.26\n"
        + "Row #0: -1.#INF\n"
        + "Row #0: 5.17E-08\n"
        + "...\n"
        + "Row #11: 15343.67\n");
}
/**
 * Tests LinRegPoint over the twelve months of 1997, using each month's
 * rank as the x value and [Store Sales] as the y value.
 */
public void testLinRegPointMonth() {
    assertQueryReturns(
        "WITH MEMBER \n"
        + "[Measures].[Test] as \n"
        + "  'LinRegPoint(\n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members),\n"
        + "    Descendants([Time].[1997], [Time].[Month]), \n"
        + "    [Measures].[Store Sales], \n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members)\n"
        + " )' \n"
        + "SELECT \n"
        + "  {[Measures].[Test],[Measures].[Store Sales]} ON ROWS, \n"
        + "  Descendants([Time].[1997], [Time].[Month]) ON COLUMNS \n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Test]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Row #0: 43,824.36\n"
        + "Row #0: 44,420.51\n"
        + "Row #0: 45,016.66\n"
        + "Row #0: 45,612.81\n"
        + "Row #0: 46,208.95\n"
        + "Row #0: 46,805.10\n"
        + "Row #0: 47,401.25\n"
        + "Row #0: 47,997.40\n"
        + "Row #0: 48,593.55\n"
        + "Row #0: 49,189.70\n"
        + "Row #0: 49,785.85\n"
        + "Row #0: 50,382.00\n"
        + "Row #1: 45,539.69\n"
        + "Row #1: 44,058.79\n"
        + "Row #1: 50,029.87\n"
        + "Row #1: 42,878.25\n"
        + "Row #1: 44,456.29\n"
        + "Row #1: 45,331.73\n"
        + "Row #1: 50,246.88\n"
        + "Row #1: 46,199.04\n"
        + "Row #1: 43,825.97\n"
        + "Row #1: 42,342.27\n"
        + "Row #1: 53,363.71\n"
        + "Row #1: 56,965.64\n");
}
/**
 * Tests LinRegIntercept over the 1997 months. Several cases are kept in
 * {@code if (false)} blocks because Mondrian cannot produce MSAS's
 * "-1.#IND" missing-data value, or because the format differs.
 */
public void testLinRegIntercept() {
    assertExprReturns(
        "LinRegIntercept([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        -126.65,
        0.50);
/*
-1#IND missing data
*/
/*
1#INF division by zero
*/
/*
The following table shows query return values from using different
FORMAT_STRING's in an expression involving 'division by zero' (tested on
Intel platforms):
+===========================+=====================+
| Format Strings            | Query Return Values |
+===========================+=====================+
| FORMAT_STRING="           | 1.#INF              |
+===========================+=====================+
| FORMAT_STRING='Standard'  | 1.#J                |
+===========================+=====================+
| FORMAT_STRING='Fixed'     | 1.#J                |
+===========================+=====================+
| FORMAT_STRING='Percent'   | 1#I.NF%             |
+===========================+=====================+
| FORMAT_STRING='Scientific'| 1.JE+00             |
+===========================+=====================+
*/
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegIntercept({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegIntercept([Time].[Month].members,"
            + " 7, [Measures].[Store Sales])",
            "$7.00");
    }
    // format does not add '$'
    assertExprReturns(
        "LinRegIntercept([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        7.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegIntercept([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests LinRegSlope over the 1997 months. As with testLinRegIntercept,
 * the MSAS "-1.#IND" cases are kept disabled in {@code if (false)}
 * blocks.
 */
public void testLinRegSlope() {
    assertExprReturns(
        "LinRegSlope([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        0.4746,
        0.50);
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegSlope({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegSlope([Time].[Month].members,"
            + " 7, [Measures].[Store Sales])",
            "$7.00");
    }
    //       ^^^^
    // copy and paste error: the "$7.00" expectation above was copied
    // from testLinRegIntercept; a constant y yields slope 0, as the
    // enabled assertion below checks.
    assertExprReturns(
        "LinRegSlope([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        0.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegSlope([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the LinRegPoint function (value of the regression line at a given
 * point). Most variants are disabled: one form does not parse, one has a
 * wrong expected value, and Mondrian cannot reproduce the MSAS
 * "missing data" value -1.#IND.
 */
public void testLinRegPoint() {
    // NOTE: mdx does not parse
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].CurrentMember[Time].[Month].members,"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "0.4746");
    }
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " {[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // Expected value is wrong
    // zeroth expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint(-1,"
            + " [Time].[Month].members,"
            + " 7, [Measures].[Store Sales])", "-127.124");
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].[Month].members,"
            + " 7, [Measures].[Store Sales])", "$7.00");
    }
    // format does not add '$'
    assertExprReturns(
        "LinRegPoint([Measures].[Unit Sales],"
        + " [Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        7.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the LinRegR2 function (coefficient of determination). Disabled
 * (leading underscore keeps JUnit from running it): the expected values
 * are questionable — R2 should not equal the slope, and the "$7.00"
 * expectation for a constant y expression looks wrong.
 */
public void _testLinRegR2() {
    // Why would R2 equal the slope
    if (false) {
        assertExprReturns(
            "LinRegR2([Time].[Month].members,"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "0.4746");
    }
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegR2({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    assertExprReturns(
        "LinRegR2([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        "$7.00");
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegR2([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the LinRegVariance function. Disabled (leading underscore keeps
 * JUnit from running it): Mondrian cannot reproduce the MSAS
 * "missing data" value -1.#IND expected by two of the cases.
 */
public void _testLinRegVariance() {
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        "0.4746");
    // empty set
    assertExprReturns(
        "LinRegVariance({[Time].Parent},"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    // first expr constant
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        "$7.00");
    // second expr constant
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " [Measures].[Unit Sales], 4)",
        "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
}
/**
 * Basic VisualTotals test: the substituted [*Subtotal - Bread] member
 * aggregates only the two children that appear in the set.
 */
public void testVisualTotalsBasic() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // note that Subtotal - Bread only includes 2 displayed children
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 4,312\n"
        + "Row #1: 815\n"
        + "Row #2: 3,497\n");
}
/**
 * Tests VisualTotals with a member repeated consecutively: a repeated
 * member becomes a subtotal only once, and the subtotal does not absorb
 * later occurrences of the same member.
 */
public void testVisualTotalsConsecutively() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels].[Colony],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // Note that [Bagels] occurs 3 times, but only once does it
        // become a subtotal. Note that the subtotal does not include
        // the following [Bagels] member.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[*Subtotal - Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels].[Colony]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 5,290\n"
        + "Row #1: 815\n"
        + "Row #2: 163\n"
        + "Row #3: 163\n"
        + "Row #4: 815\n"
        + "Row #5: 3,497\n");
}
/**
 * Tests VisualTotals without the optional name pattern: the visual total
 * member keeps the original member name.
 */
public void testVisualTotalsNoPattern() {
    assertAxisReturns(
        "VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]})",
        // Note that the [Bread] visual member is just called [Bread].
        "[Product].[Food].[Baked Goods].[Bread]\n"
        + "[Product].[Food].[Baked Goods].[Bread].[Bagels]\n"
        + "[Product].[Food].[Baked Goods].[Bread].[Muffins]");
}
/**
 * Tests Filter applied <em>around</em> VisualTotals: the subtotal is
 * computed before filtering, so it still includes members that the
 * filter later removes from the axis.
 */
public void testVisualTotalsWithFilter() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{Filter("
        + " VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\"),"
        + "[Measures].[Unit Sales] > 3400)} on rows "
        + "from [Sales]",
        // Note that [*Subtotal - Bread] still contains the
        // contribution of [Bagels] 815, which was filtered out.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 4,312\n"
        + "Row #1: 3,497\n");
}
/**
 * Tests VisualTotals applied on top of another VisualTotals: the outer
 * call does not generate an extra total; the result is the same as the
 * single-VisualTotals query in {@link #testVisualTotalsWithFilter()}.
 */
public void testVisualTotalsNested() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " Filter("
        + " VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\"),"
        + " [Measures].[Unit Sales] > 3400),"
        + " \"Second total - *\")} on rows "
        + "from [Sales]",
        // Yields the same -- no extra total.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 4,312\n"
        + "Row #1: 3,497\n");
}
/**
 * Tests Filter applied <em>inside</em> VisualTotals: filtering happens
 * before the subtotal is computed, so [*Subtotal - Bread] (3,497)
 * excludes the filtered-out [Bagels] — contrast with
 * {@link #testVisualTotalsWithFilter()}.
 */
public void testVisualTotalsFilterInside() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " Filter("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " [Measures].[Unit Sales] > 3400),"
        + " \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 3,497\n"
        + "Row #1: 3,497\n");
}
/**
 * Tests VisualTotals when a child precedes its parent in the set: only
 * children <em>following</em> the parent are rolled into its subtotal.
 */
public void testVisualTotalsOutOfOrder() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // Note that [*Subtotal - Bread] 3497 does not include 815 for
        // bagels.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 815\n"
        + "Row #1: 3,497\n"
        + "Row #2: 3,497\n");
}
/**
 * Tests VisualTotals with members from several levels (including
 * grandchildren) and with a trailing child that appears after its
 * parent's subtotal has been closed.
 */
public void testVisualTotalsGrandparentsAndOutOfOrder() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + " {[Product].[All Products].[Food],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + " [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods],"
        + " [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Golden],"
        + " [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Big Time],"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + " \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // Note:
        // [*Subtotal - Food] = 4513 = 815 + 311 + 3497
        // [*Subtotal - Bread] = 815, does not include muffins
        // [*Subtotal - Breakfast Foods] = 311 = 110 + 201, includes
        // grandchildren
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[*Subtotal - Food]}\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Frozen Foods].[*Subtotal - Breakfast Foods]}\n"
        + "{[Product].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Golden]}\n"
        + "{[Product].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Big Time]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 4,623\n"
        + "Row #1: 815\n"
        + "Row #2: 815\n"
        + "Row #3: 311\n"
        + "Row #4: 110\n"
        + "Row #5: 201\n"
        + "Row #6: 3,497\n");
}
/**
 * Verifies that VisualTotals rejects a set of tuples; it accepts only a
 * set of members.
 */
public void testVisualTotalsCrossjoin() {
    assertAxisThrows(
        "VisualTotals(Crossjoin([Gender].Members, [Store].children))",
        "Argument to 'VisualTotals' function must be a set of members; got set of tuples.");
}
/**
 * Test case for bug
 * <a href="http://jira.pentaho.com/browse/MONDRIAN-615">MONDRIAN-615</a>,
 * "VisualTotals doesn't work for the all member".
 *
 * <p>Also verifies the captions of all four members generated by
 * VisualTotals, including the last position ([OR]), which was previously
 * left unchecked.
 */
public void testVisualTotalsAll() {
    final String query =
        "SELECT \n"
        + " {[Measures].[Unit Sales]} ON 0, \n"
        + " VisualTotals(\n"
        + " {[Customers].[All Customers],\n"
        + " [Customers].[USA],\n"
        + " [Customers].[USA].[CA],\n"
        + " [Customers].[USA].[OR]}) ON 1\n"
        + "FROM [Sales]";
    assertQueryReturns(
        query,
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Customers].[All Customers]}\n"
        + "{[Customers].[USA]}\n"
        + "{[Customers].[USA].[CA]}\n"
        + "{[Customers].[USA].[OR]}\n"
        + "Row #0: 142,407\n"
        + "Row #1: 142,407\n"
        + "Row #2: 74,748\n"
        + "Row #3: 67,659\n");
    // Check captions of the VisualTotals members; every row position is
    // verified, including [OR] at position 3.
    final Result result = getTestContext().executeQuery(query);
    final List<Position> positionList = result.getAxes()[1].getPositions();
    assertEquals("All Customers", positionList.get(0).get(0).getCaption());
    assertEquals("USA", positionList.get(1).get(0).getCaption());
    assertEquals("CA", positionList.get(2).get(0).getCaption());
    assertEquals("OR", positionList.get(3).get(0).getCaption());
}
/**
 * Test case involving a named set and query pivoted. Suggested in
 * <a href="http://jira.pentaho.com/browse/MONDRIAN-615">MONDRIAN-615</a>,
 * "VisualTotals doesn't work for the all member".
 *
 * <p>Runs the same query twice — once with the named set on rows and once
 * with the axes swapped — and expects the same cell values transposed.
 */
public void testVisualTotalsWithNamedSetAndPivot() {
    assertQueryReturns(
        "WITH SET [CA_OR] AS\n"
        + " VisualTotals(\n"
        + " {[Customers].[All Customers],\n"
        + " [Customers].[USA],\n"
        + " [Customers].[USA].[CA],\n"
        + " [Customers].[USA].[OR]})\n"
        + "SELECT \n"
        + " Drilldownlevel({[Time].[1997]}) ON 0, \n"
        + " [CA_OR] ON 1 \n"
        + "FROM [Sales] ",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "Axis #2:\n"
        + "{[Customers].[All Customers]}\n"
        + "{[Customers].[USA]}\n"
        + "{[Customers].[USA].[CA]}\n"
        + "{[Customers].[USA].[OR]}\n"
        + "Row #0: 142,407\n"
        + "Row #0: 36,177\n"
        + "Row #0: 33,131\n"
        + "Row #0: 35,310\n"
        + "Row #0: 37,789\n"
        + "Row #1: 142,407\n"
        + "Row #1: 36,177\n"
        + "Row #1: 33,131\n"
        + "Row #1: 35,310\n"
        + "Row #1: 37,789\n"
        + "Row #2: 74,748\n"
        + "Row #2: 16,890\n"
        + "Row #2: 18,052\n"
        + "Row #2: 18,370\n"
        + "Row #2: 21,436\n"
        + "Row #3: 67,659\n"
        + "Row #3: 19,287\n"
        + "Row #3: 15,079\n"
        + "Row #3: 16,940\n"
        + "Row #3: 16,353\n");
    // same query, swap axes
    assertQueryReturns(
        "WITH SET [CA_OR] AS\n"
        + " VisualTotals(\n"
        + " {[Customers].[All Customers],\n"
        + " [Customers].[USA],\n"
        + " [Customers].[USA].[CA],\n"
        + " [Customers].[USA].[OR]})\n"
        + "SELECT \n"
        + " [CA_OR] ON 0,\n"
        + " Drilldownlevel({[Time].[1997]}) ON 1\n"
        + "FROM [Sales] ",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Customers].[All Customers]}\n"
        + "{[Customers].[USA]}\n"
        + "{[Customers].[USA].[CA]}\n"
        + "{[Customers].[USA].[OR]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "Row #0: 142,407\n"
        + "Row #0: 142,407\n"
        + "Row #0: 74,748\n"
        + "Row #0: 67,659\n"
        + "Row #1: 36,177\n"
        + "Row #1: 36,177\n"
        + "Row #1: 16,890\n"
        + "Row #1: 19,287\n"
        + "Row #2: 33,131\n"
        + "Row #2: 33,131\n"
        + "Row #2: 18,052\n"
        + "Row #2: 15,079\n"
        + "Row #3: 35,310\n"
        + "Row #3: 35,310\n"
        + "Row #3: 18,370\n"
        + "Row #3: 16,940\n"
        + "Row #4: 37,789\n"
        + "Row #4: 37,789\n"
        + "Row #4: 21,436\n"
        + "Row #4: 16,353\n");
}
/**
 * Tests that members generated by VisualTotals have correct identity,
 * so that Intersect recognizes them against the drilled-down set.
 *
 * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-295">
 * bug MONDRIAN-295, "Query generated by Excel 2007 gives incorrect
 * results"</a>.
 */
public void testVisualTotalsIntersect() {
    assertQueryReturns(
        "WITH\n"
        + "SET [XL_Row_Dim_0] AS 'VisualTotals(Distinct(Hierarchize({Ascendants([Customers].[All Customers].[USA]), Descendants([Customers].[All Customers].[USA])})))' \n"
        + "SELECT \n"
        + "NON EMPTY Hierarchize({[Time].[Year].members}) ON COLUMNS , \n"
        + "NON EMPTY Hierarchize(Intersect({DrilldownLevel({[Customers].[All Customers]})}, [XL_Row_Dim_0])) ON ROWS \n"
        + "FROM [Sales] \n"
        + "WHERE ([Measures].[Store Sales])",
        "Axis #0:\n"
        + "{[Measures].[Store Sales]}\n"
        + "Axis #1:\n"
        + "{[Time].[1997]}\n"
        + "Axis #2:\n"
        + "{[Customers].[All Customers]}\n"
        + "{[Customers].[USA]}\n"
        + "Row #0: 565,238.13\n"
        + "Row #1: 565,238.13\n");
}
/**
 * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-668">
 * bug MONDRIAN-668, "Intersect should return any VisualTotals members in
 * right-hand set"</a>.
 *
 * <p>First intersects a plain member set with the VisualTotals named set,
 * then repeats the exercise with tuples built by crossjoin.
 */
public void testVisualTotalsWithNamedSetAndPivotSameAxis() {
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA]),\n"
        + " Descendants([Store].[USA].[CA])})))\n"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " {DrilldownLevel({[Store].[USA]})},\n"
        + " [XL_Row_Dim_0])) ON COLUMNS\n"
        + "from [Sales] "
        + "where [Measures].[Sales count]\n",
        "Axis #0:\n"
        + "{[Measures].[Sales Count]}\n"
        + "Axis #1:\n"
        + "{[Store].[USA]}\n"
        + "{[Store].[USA].[CA]}\n"
        + "Row #0: 24,442\n"
        + "Row #0: 24,442\n");
    // now with tuples
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA]),\n"
        + " Descendants([Store].[USA].[CA])})))\n"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " [Marital Status].[M]\n"
        + " * {DrilldownLevel({[Store].[USA]})}\n"
        + " * [Gender].[F],\n"
        + " [Marital Status].[M]\n"
        + " * [XL_Row_Dim_0]\n"
        + " * [Gender].[F])) ON COLUMNS\n"
        + "from [Sales] "
        + "where [Measures].[Sales count]\n",
        "Axis #0:\n"
        + "{[Measures].[Sales Count]}\n"
        + "Axis #1:\n"
        + "{[Marital Status].[M], [Store].[USA], [Gender].[F]}\n"
        + "{[Marital Status].[M], [Store].[USA].[CA], [Gender].[F]}\n"
        + "Row #0: 6,054\n"
        + "Row #0: 6,054\n");
}
/**
 * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-682">
 * bug MONDRIAN-682, "VisualTotals + Distinct-count measure gives wrong
 * results"</a>.
 *
 * <p>Exercises distinct-count measures against VisualTotals members in
 * four variants: slicer measure, multi-city set, measures on columns,
 * and tuples built by crossjoin.
 */
public void testVisualTotalsDistinctCountMeasure() {
    // distinct measure
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA]),\n"
        + " Descendants([Store].[USA].[CA])})))\n"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " {DrilldownLevel({[Store].[All Stores]})},\n"
        + " [XL_Row_Dim_0])) ON COLUMNS\n"
        + "from [HR] "
        + "where [Measures].[Number of Employees]\n",
        "Axis #0:\n"
        + "{[Measures].[Number of Employees]}\n"
        + "Axis #1:\n"
        + "{[Store].[All Stores]}\n"
        + "{[Store].[USA]}\n"
        + "Row #0: 193\n"
        + "Row #0: 193\n");
    // distinct measure
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA].[Beverly Hills]),\n"
        + " Descendants([Store].[USA].[CA].[Beverly Hills]),\n"
        + " Ascendants([Store].[USA].[CA].[Los Angeles]),\n"
        + " Descendants([Store].[USA].[CA].[Los Angeles])})))"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " {DrilldownLevel({[Store].[All Stores]})},\n"
        + " [XL_Row_Dim_0])) ON COLUMNS\n"
        + "from [HR] "
        + "where [Measures].[Number of Employees]\n",
        "Axis #0:\n"
        + "{[Measures].[Number of Employees]}\n"
        + "Axis #1:\n"
        + "{[Store].[All Stores]}\n"
        + "{[Store].[USA]}\n"
        + "Row #0: 110\n"
        + "Row #0: 110\n");
    // distinct measure on columns
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA]),\n"
        + " Descendants([Store].[USA].[CA])})))\n"
        + "select {[Measures].[Count], [Measures].[Number of Employees]} on COLUMNS,"
        + " NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " {DrilldownLevel({[Store].[All Stores]})},\n"
        + " [XL_Row_Dim_0])) ON ROWS\n"
        + "from [HR] ",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Count]}\n"
        + "{[Measures].[Number of Employees]}\n"
        + "Axis #2:\n"
        + "{[Store].[All Stores]}\n"
        + "{[Store].[USA]}\n"
        + "Row #0: 2,316\n"
        + "Row #0: 193\n"
        + "Row #1: 2,316\n"
        + "Row #1: 193\n");
    // distinct measure with tuples
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[CA]),\n"
        + " Descendants([Store].[USA].[CA])})))\n"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " [Marital Status].[M]\n"
        + " * {DrilldownLevel({[Store].[USA]})}\n"
        + " * [Gender].[F],\n"
        + " [Marital Status].[M]\n"
        + " * [XL_Row_Dim_0]\n"
        + " * [Gender].[F])) ON COLUMNS\n"
        + "from [Sales] "
        + "where [Measures].[Customer count]\n",
        "Axis #0:\n"
        + "{[Measures].[Customer Count]}\n"
        + "Axis #1:\n"
        + "{[Marital Status].[M], [Store].[USA], [Gender].[F]}\n"
        + "{[Marital Status].[M], [Store].[USA].[CA], [Gender].[F]}\n"
        + "Row #0: 654\n"
        + "Row #0: 654\n");
}
/**
 * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-761">
 * bug MONDRIAN-761, "VisualTotalMember cannot be cast to
 * RolapCubeMember"</a>.
 *
 * <p>Uses an Excel-style query with nested DrilldownMember calls,
 * DIMENSION PROPERTIES and CELL PROPERTIES clauses to reproduce the
 * ClassCastException.
 */
public void testVisualTotalsClassCast() {
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Store].[USA].[WA].[Yakima]), \n"
        + " Descendants([Store].[USA].[WA].[Yakima]), \n"
        + " Ascendants([Store].[USA].[WA].[Walla Walla]), \n"
        + " Descendants([Store].[USA].[WA].[Walla Walla]), \n"
        + " Ascendants([Store].[USA].[WA].[Tacoma]), \n"
        + " Descendants([Store].[USA].[WA].[Tacoma]), \n"
        + " Ascendants([Store].[USA].[WA].[Spokane]), \n"
        + " Descendants([Store].[USA].[WA].[Spokane]), \n"
        + " Ascendants([Store].[USA].[WA].[Seattle]), \n"
        + " Descendants([Store].[USA].[WA].[Seattle]), \n"
        + " Ascendants([Store].[USA].[WA].[Bremerton]), \n"
        + " Descendants([Store].[USA].[WA].[Bremerton]), \n"
        + " Ascendants([Store].[USA].[OR]), \n"
        + " Descendants([Store].[USA].[OR])}))) \n"
        + " SELECT NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " DrilldownMember(\n"
        + " {{DrilldownMember(\n"
        + " {{DrilldownMember(\n"
        + " {{DrilldownLevel(\n"
        + " {[Store].[All Stores]})}},\n"
        + " {[Store].[USA]})}},\n"
        + " {[Store].[USA].[WA]})}},\n"
        + " {[Store].[USA].[WA].[Bremerton]}),\n"
        + " [XL_Row_Dim_0]))\n"
        + "DIMENSION PROPERTIES \n"
        + " PARENT_UNIQUE_NAME, \n"
        + " [Store].[Store Name].[Store Type],\n"
        + " [Store].[Store Name].[Store Manager],\n"
        + " [Store].[Store Name].[Store Sqft],\n"
        + " [Store].[Store Name].[Grocery Sqft],\n"
        + " [Store].[Store Name].[Frozen Sqft],\n"
        + " [Store].[Store Name].[Meat Sqft],\n"
        + " [Store].[Store Name].[Has coffee bar],\n"
        + " [Store].[Store Name].[Street address] ON COLUMNS \n"
        + "FROM [HR]\n"
        + "WHERE \n"
        + " ([Measures].[Number of Employees])\n"
        + "CELL PROPERTIES\n"
        + " VALUE,\n"
        + " FORMAT_STRING,\n"
        + " LANGUAGE,\n"
        + " BACK_COLOR,\n"
        + " FORE_COLOR,\n"
        + " FONT_FLAGS",
        "Axis #0:\n"
        + "{[Measures].[Number of Employees]}\n"
        + "Axis #1:\n"
        + "{[Store].[All Stores]}\n"
        + "{[Store].[USA]}\n"
        + "{[Store].[USA].[OR]}\n"
        + "{[Store].[USA].[WA]}\n"
        + "{[Store].[USA].[WA].[Bremerton]}\n"
        + "{[Store].[USA].[WA].[Bremerton].[Store 3]}\n"
        + "{[Store].[USA].[WA].[Seattle]}\n"
        + "{[Store].[USA].[WA].[Spokane]}\n"
        + "{[Store].[USA].[WA].[Tacoma]}\n"
        + "{[Store].[USA].[WA].[Walla Walla]}\n"
        + "{[Store].[USA].[WA].[Yakima]}\n"
        + "Row #0: 419\n"
        + "Row #0: 419\n"
        + "Row #0: 136\n"
        + "Row #0: 283\n"
        + "Row #0: 62\n"
        + "Row #0: 62\n"
        + "Row #0: 62\n"
        + "Row #0: 62\n"
        + "Row #0: 74\n"
        + "Row #0: 4\n"
        + "Row #0: 19\n");
}
/**
 * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-678">
 * bug MONDRIAN-678, "VisualTotals gives UnsupportedOperationException
 * calling getOrdinal"</a>. Key difference from previous test is that there
 * are multiple hierarchies in Named set.
 */
public void testVisualTotalsWithNamedSetOfTuples() {
    assertQueryReturns(
        "WITH SET [XL_Row_Dim_0] AS\n"
        + " VisualTotals(\n"
        + " Distinct(\n"
        + " Hierarchize(\n"
        + " {Ascendants([Customers].[All Customers].[USA].[CA].[Beverly Hills].[Ari Tweten]),\n"
        + " Descendants([Customers].[All Customers].[USA].[CA].[Beverly Hills].[Ari Tweten]),\n"
        + " Ascendants([Customers].[All Customers].[Mexico]),\n"
        + " Descendants([Customers].[All Customers].[Mexico])})))\n"
        + "select NON EMPTY \n"
        + " Hierarchize(\n"
        + " Intersect(\n"
        + " (DrilldownMember(\n"
        + " {{DrilldownMember(\n"
        + " {{DrilldownLevel(\n"
        + " {[Customers].[All Customers]})}},\n"
        + " {[Customers].[All Customers].[USA]})}},\n"
        + " {[Customers].[All Customers].[USA].[CA]})),\n"
        + " [XL_Row_Dim_0])) ON COLUMNS\n"
        + "from [Sales]\n"
        + "where [Measures].[Sales count]\n",
        "Axis #0:\n"
        + "{[Measures].[Sales Count]}\n"
        + "Axis #1:\n"
        + "{[Customers].[All Customers]}\n"
        + "{[Customers].[USA]}\n"
        + "{[Customers].[USA].[CA]}\n"
        + "{[Customers].[USA].[CA].[Beverly Hills]}\n"
        + "Row #0: 4\n"
        + "Row #0: 4\n"
        + "Row #0: 4\n"
        + "Row #0: 4\n");
}
/**
 * Verifies that a member generated by VisualTotals reports the same level
 * as the member it replaces ([*Subtotal - Bread] is at the
 * [Product Category] level, like [Bread]).
 */
public void testVisualTotalsLevel() {
    Result result = getTestContext().executeQuery(
        "select {[Measures].[Unit Sales]} on columns,\n"
        + "{[Product].[All Products],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread],\n"
        + " VisualTotals(\n"
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},\n"
        + " \"**Subtotal - *\")} on rows\n"
        + "from [Sales]");
    final List<Position> rowPos = result.getAxes()[1].getPositions();
    final Member member0 = rowPos.get(0).get(0);
    assertEquals("All Products", member0.getName());
    assertEquals("(All)", member0.getLevel().getName());
    final Member member1 = rowPos.get(1).get(0);
    assertEquals("Bread", member1.getName());
    assertEquals("Product Category", member1.getLevel().getName());
    final Member member2 = rowPos.get(2).get(0);
    assertEquals("*Subtotal - Bread", member2.getName());
    assertEquals("Product Category", member2.getLevel().getName());
    final Member member3 = rowPos.get(3).get(0);
    assertEquals("Bagels", member3.getName());
    assertEquals("Product Subcategory", member3.getLevel().getName());
    final Member member4 = rowPos.get(4).get(0);
    assertEquals("Muffins", member4.getName());
    assertEquals("Product Subcategory", member4.getLevel().getName());
}
/**
 * Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-749">
 * MONDRIAN-749, "Cannot use visual totals members in calculations"</a>.
 *
 * <p>The bug is not currently fixed, so it is a negative test case: the
 * expected result deliberately pins the current (wrong) behavior. Row #2
 * cell #1 contains an exception, but should be "**Subtotal - Bread :
 * Product Subcategory".
 */
public void testVisualTotalsMemberInCalculation() {
    getTestContext().assertQueryReturns(
        "with member [Measures].[Foo] as\n"
        + " [Product].CurrentMember.Name || ' : ' || [Product].Level.Name\n"
        + "select {[Measures].[Unit Sales], [Measures].[Foo]} on columns,\n"
        + "{[Product].[All Products],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread],\n"
        + " VisualTotals(\n"
        + " {[Product].[All Products].[Food].[Baked Goods].[Bread],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],\n"
        + " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},\n"
        + " \"**Subtotal - *\")} on rows\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Foo]}\n"
        + "Axis #2:\n"
        + "{[Product].[All Products]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 266,773\n"
        + "Row #0: All Products : (All)\n"
        + "Row #1: 7,870\n"
        + "Row #1: Bread : Product Category\n"
        + "Row #2: 4,312\n"
        + "Row #2: #ERR: mondrian.olap.fun.MondrianEvaluationException: Could not find an aggregator in the current evaluation context\n"
        + "Row #3: 815\n"
        + "Row #3: Bagels : Product Subcategory\n"
        + "Row #4: 3,497\n"
        + "Row #4: Muffins : Product Subcategory\n");
}
/**
 * Tests the CalculatedChild function: a calculated child with the same
 * name is defined under both [Drink] and [Non-Consumable], and a measure
 * selects the correct one for the current product member. Also checks
 * that a lookup by an unknown name yields null.
 */
public void testCalculatedChild() {
    // Construct calculated children with the same name for both [Drink] and
    // [Non-Consumable]. Then, create a metric to select the calculated
    // child based on current product member.
    assertQueryReturns(
        "with\n"
        + " member [Product].[All Products].[Drink].[Calculated Child] as '[Product].[All Products].[Drink].[Alcoholic Beverages]'\n"
        + " member [Product].[All Products].[Non-Consumable].[Calculated Child] as '[Product].[All Products].[Non-Consumable].[Carousel]'\n"
        + " member [Measures].[Unit Sales CC] as '([Measures].[Unit Sales],[Product].currentmember.CalculatedChild(\"Calculated Child\"))'\n"
        + " select non empty {[Measures].[Unit Sales CC]} on columns,\n"
        + " non empty {[Product].[Drink], [Product].[Non-Consumable]} on rows\n"
        + " from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales CC]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink]}\n"
        + "{[Product].[Non-Consumable]}\n"
        + "Row #0: 6,838\n" // Calculated child for [Drink]
        + "Row #1: 841\n"); // Calculated child for [Non-Consumable]
    Member member = executeSingletonAxis(
        "[Product].[All Products].CalculatedChild(\"foobar\")");
    // Was Assert.assertEquals(member, null): wrong (expected, actual)
    // order; assertNull states the intent and fails with a clear message.
    Assert.assertNull(member);
}
/**
 * Tests selecting a calculated child via AddCalculatedMembers + Item.
 * Documents existing (arguably buggy) behavior: for [Non-Consumable] the
 * calculated child of [Drink] is selected. Also checks that a lookup by
 * an unknown name yields null.
 */
public void testCalculatedChildUsingItem() {
    // Construct calculated children with the same name for both [Drink] and
    // [Non-Consumable]. Then, create a metric to select the first
    // calculated child.
    assertQueryReturns(
        "with\n"
        + " member [Product].[All Products].[Drink].[Calculated Child] as '[Product].[All Products].[Drink].[Alcoholic Beverages]'\n"
        + " member [Product].[All Products].[Non-Consumable].[Calculated Child] as '[Product].[All Products].[Non-Consumable].[Carousel]'\n"
        + " member [Measures].[Unit Sales CC] as '([Measures].[Unit Sales],AddCalculatedMembers([Product].currentmember.children).Item(\"Calculated Child\"))'\n"
        + " select non empty {[Measures].[Unit Sales CC]} on columns,\n"
        + " non empty {[Product].[Drink], [Product].[Non-Consumable]} on rows\n"
        + " from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales CC]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink]}\n"
        + "{[Product].[Non-Consumable]}\n"
        + "Row #0: 6,838\n"
        // Note: For [Non-Consumable], the calculated child for [Drink] was
        // selected!
        + "Row #1: 6,838\n");
    Member member = executeSingletonAxis(
        "[Product].[All Products].CalculatedChild(\"foobar\")");
    // Was Assert.assertEquals(member, null): wrong (expected, actual)
    // order; assertNull states the intent and fails with a clear message.
    Assert.assertNull(member);
}
/**
 * CalculatedChild on a member with no children must return null rather
 * than throw.
 */
public void testCalculatedChildOnMemberWithNoChildren() {
    Member member =
        executeSingletonAxis(
            "[Measures].[Store Sales].CalculatedChild(\"foobar\")");
    // Was Assert.assertEquals(member, null): wrong (expected, actual)
    // order; assertNull states the intent and fails with a clear message.
    Assert.assertNull(member);
}
/**
 * CalculatedChild applied to the null member (parent of a top-level
 * measure) must return null rather than throw.
 */
public void testCalculatedChildOnNullMember() {
    Member member =
        executeSingletonAxis(
            "[Measures].[Store Sales].parent.CalculatedChild(\"foobar\")");
    // Was Assert.assertEquals(member, null): wrong (expected, actual)
    // order; assertNull states the intent and fails with a clear message.
    Assert.assertNull(member);
}
/**
 * Tests the Cast operator: conversions from integer, boolean and null to
 * Integer, Numeric, String and Boolean; precedence of 'AS' inside CAST
 * versus inline named sets; and error cases for unknown/invalid types.
 */
public void testCast() {
    // NOTE: Some of these tests fail with 'cannot convert ...', and they
    // probably shouldn't. Feel free to fix the conversion.
    // -- jhyde, 2006/9/3
    // From integer
    // To integer (trivial)
    assertExprReturns("0 + Cast(1 + 2 AS Integer)", "3");
    // To String
    assertExprReturns("'' || Cast(1 + 2 AS String)", "3.0");
    // To Boolean
    assertExprReturns("1=1 AND Cast(1 + 2 AS Boolean)", "true");
    assertExprReturns("1=1 AND Cast(1 - 1 AS Boolean)", "false");
    // From boolean
    // To String
    assertExprReturns("'' || Cast((1 = 1 AND 1 = 2) AS String)", "false");
    // This case demonstrates the relative precedence of 'AS' in 'CAST'
    // and 'AS' for creating inline named sets. See also bug MONDRIAN-648.
    Util.discard(Bug.BugMondrian648Fixed);
    assertExprReturns(
        "'xxx' || Cast(1 = 1 AND 1 = 2 AS String)",
        "xxxfalse");
    // To boolean (trivial)
    assertExprReturns(
        "1=1 AND Cast((1 = 1 AND 1 = 2) AS Boolean)",
        "false");
    assertExprReturns(
        "1=1 OR Cast(1 = 1 AND 1 = 2 AS Boolean)",
        "true");
    // From null : should not throw exceptions since RolapResult.executeBody
    // can receive NULL values when the cell value is not loaded yet, so
    // should return null instead.
    // To Integer : Expect to return NULL
    // Expect to return NULL
    assertExprReturns("0 * Cast(NULL AS Integer)", "");
    // To Numeric : Expect to return NULL
    // Expect to return NULL
    assertExprReturns("0 * Cast(NULL AS Numeric)", "");
    // To String : Expect to return "null"
    assertExprReturns("'' || Cast(NULL AS String)", "null");
    // To Boolean : Expect to return NULL, but since FunUtil.BooleanNull
    // does not implement three-valued boolean logic yet, this will return
    // false
    assertExprReturns("1=1 AND Cast(NULL AS Boolean)", "false");
    // Double is not allowed as a type
    assertExprThrows(
        "Cast(1 AS Double)",
        "Unknown type 'Double'; values are NUMERIC, STRING, BOOLEAN");
    // An integer constant is not allowed as a type
    assertExprThrows(
        "Cast(1 AS 5)",
        "Syntax error at line 1, column 11, token '5'");
    assertExprReturns("Cast('tr' || 'ue' AS boolean)", "true");
}
/**
 * Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-524">
 * MONDRIAN-524, "VB functions: expected primitive type, got
 * java.lang.Object"</a>.
 *
 * <p>Casting the result of the VB Int() function to String must not fail
 * with a primitive-type mismatch.
 */
public void testCastBug524() {
    assertExprReturns(
        "Cast(Int([Measures].[Store Sales] / 3600) as String)",
        "157");
}
/**
 * Tests {@link mondrian.olap.FunTable#getFunInfoList()}, but more
 * importantly, generates an HTML table of all implemented functions into
 * a file called "functions.html". You can manually include that table
 * in the <a href="{@docRoot}/../mdx.html">MDX
 * specification</a>.
 *
 * @throws IOException if the output file cannot be created
 */
public void testDumpFunctions() throws IOException {
    final List<FunInfo> funInfoList = new ArrayList<FunInfo>();
    funInfoList.addAll(BuiltinFunTable.instance().getFunInfoList());
    // Add some UDFs.
    funInfoList.add(
        new FunInfo(
            new UdfResolver(
                new UdfResolver.ClassUdfFactory(
                    CurrentDateMemberExactUdf.class,
                    null))));
    funInfoList.add(
        new FunInfo(
            new UdfResolver(
                new UdfResolver.ClassUdfFactory(
                    CurrentDateMemberUdf.class,
                    null))));
    funInfoList.add(
        new FunInfo(
            new UdfResolver(
                new UdfResolver.ClassUdfFactory(
                    CurrentDateStringUdf.class,
                    null))));
    Collections.sort(funInfoList);
    final File file = new File("functions.html");
    final FileOutputStream os = new FileOutputStream(file);
    final PrintWriter pw = new PrintWriter(os);
    // Close the writer (and the underlying stream) even if HTML generation
    // throws; previously an exception here leaked the stream and left the
    // file unflushed.
    try {
        pw.println("<table border='1'>");
        pw.println("<tr>");
        pw.println("<td><b>Name</b></td>");
        pw.println("<td><b>Description</b></td>");
        pw.println("</tr>");
        for (FunInfo funInfo : funInfoList) {
            pw.println("<tr>");
            pw.print(" <td valign=top><code>");
            printHtml(pw, funInfo.getName());
            pw.println("</code></td>");
            pw.print(" <td>");
            if (funInfo.getDescription() != null) {
                printHtml(pw, funInfo.getDescription());
            }
            pw.println();
            final String[] signatures = funInfo.getSignatures();
            if (signatures != null) {
                pw.println(" <h1>Syntax</h1>");
                for (int j = 0; j < signatures.length; j++) {
                    if (j > 0) {
                        pw.println("<br/>");
                    }
                    String signature = signatures[j];
                    pw.print(" ");
                    printHtml(pw, signature);
                }
                pw.println();
            }
            pw.println(" </td>");
            pw.println("</tr>");
        }
        pw.println("</table>");
    } finally {
        // PrintWriter.close() also closes the wrapped FileOutputStream.
        pw.close();
    }
}
/**
 * Verifies that an OR of several {@code NOT IsEmpty(measure)} predicates
 * is evaluated correctly when all referenced aggregates must be loaded in
 * a single batch.
 */
public void testComplexOrExpr()
{
    switch (TestContext.instance().getDialect().getDatabaseProduct()) {
    case INFOBRIGHT:
        // Skip this test on Infobright, because [Promotion Sales] is
        // defined wrong.
        return;
    }
    // make sure all aggregates referenced in the OR expression are
    // processed in a single load request by setting the eval depth to
    // a value smaller than the number of measures
    int origDepth = MondrianProperties.instance().MaxEvalDepth.get();
    MondrianProperties.instance().MaxEvalDepth.set(3);
    // Restore the property in a finally block so a failed assertion does
    // not leave subsequent tests running with the reduced eval depth.
    try {
        assertQueryReturns(
            "with set [*NATIVE_CJ_SET] as '[Store].[Store Country].members' "
            + "set [*GENERATED_MEMBERS_Measures] as "
            + " '{[Measures].[Unit Sales], [Measures].[Store Cost], "
            + " [Measures].[Sales Count], [Measures].[Customer Count], "
            + " [Measures].[Promotion Sales]}' "
            + "set [*GENERATED_MEMBERS] as "
            + " 'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' "
            + "member [Store].[*SUBTOTAL_MEMBER_SEL~SUM] as 'Sum([*GENERATED_MEMBERS])' "
            + "select [*GENERATED_MEMBERS_Measures] ON COLUMNS, "
            + "NON EMPTY "
            + " Filter("
            + " Generate("
            + " [*NATIVE_CJ_SET], "
            + " {[Store].CurrentMember}), "
            + " (((((NOT IsEmpty([Measures].[Unit Sales])) OR "
            + " (NOT IsEmpty([Measures].[Store Cost]))) OR "
            + " (NOT IsEmpty([Measures].[Sales Count]))) OR "
            + " (NOT IsEmpty([Measures].[Customer Count]))) OR "
            + " (NOT IsEmpty([Measures].[Promotion Sales])))) "
            + "on rows "
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Cost]}\n"
            + "{[Measures].[Sales Count]}\n"
            + "{[Measures].[Customer Count]}\n"
            + "{[Measures].[Promotion Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 225,627.23\n"
            + "Row #0: 86,837\n"
            + "Row #0: 5,581\n"
            + "Row #0: 151,211.21\n");
    } finally {
        MondrianProperties.instance().MaxEvalDepth.set(origDepth);
    }
}
public void testLeftFunctionWithValidArguments() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 4)=\"Bell\") on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthValueZero() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 0)=\"\" And "
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\") on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthValueEqualToStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 10)=\"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthMoreThanStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 20)=\"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithZeroLengthString() {
assertQueryReturns(
"select filter([Store].MEMBERS,Left(\"\", 20)=\"\" "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
/**
 * A negative length argument to the VBA {@code Left} function is an error;
 * the underlying {@link StringIndexOutOfBoundsException} surfaces to the
 * caller. The IBM JVM formats the exception message differently from the
 * Oracle/Sun JVM, hence the two expected messages.
 */
public void testLeftFunctionWithNegativeLength() {
    assertQueryThrows(
        "select filter([Store].MEMBERS,"
        + "Left([Store].CURRENTMEMBER.Name, -20)=\"Bellingham\") "
        + "on 0 from sales",
        Util.IBM_JVM
        ? "StringIndexOutOfBoundsException: null"
        : "StringIndexOutOfBoundsException: String index out of range: "
        + "-20");
}
public void testMidFunctionWithValidArguments() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 4, 6) = \"lingha\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testMidFunctionWithZeroLengthStringArgument() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"\", 4, 6) = \"\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testMidFunctionWithLengthArgumentLargerThanStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 4, 20) = \"lingham\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testMidFunctionWithStartIndexGreaterThanStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 20, 2) = \"\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
/**
 * {@code Mid} with a start index of zero is an error under the VBA spec;
 * SSAS 2005 instead treats start &lt;= 0 as 1. Mondrian follows VBA, so
 * the SSAS-compatible branch runs only when
 * {@link Bug#Ssas2005Compatible} is set.
 */
public void testMidFunctionWithStartIndexZeroFails() {
    // Note: SSAS 2005 treats start<=0 as 1, therefore gives different
    // result for this query. We favor the VBA spec over SSAS 2005.
    if (Bug.Ssas2005Compatible) {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 0, 2) = \"Be\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    } else {
        assertQueryThrows(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 0, 2) = \"Be\")"
            + "on 0 from sales",
            "Invalid parameter. Start parameter of Mid function must be "
            + "positive");
    }
}
public void testMidFunctionWithStartIndexOne() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 1, 2) = \"Be\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testMidFunctionWithNegativeStartIndex() {
assertQueryThrows(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", -20, 2) = \"\")"
+ "on 0 from sales",
"Invalid parameter. "
+ "Start parameter of Mid function must be positive");
}
public void testMidFunctionWithNegativeLength() {
assertQueryThrows(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 2, -2) = \"\")"
+ "on 0 from sales",
"Invalid parameter. "
+ "Length parameter of Mid function must be non-negative");
}
public void testMidFunctionWithoutLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\""
+ "And Mid(\"Bellingham\", 2) = \"ellingham\")"
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLenFunctionWithNonEmptyString() {
assertQueryReturns(
"select filter([Store].MEMBERS, "
+ "Len([Store].CURRENTMEMBER.Name) = 3) on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 266,773\n");
}
public void testLenFunctionWithAnEmptyString() {
assertQueryReturns(
"select filter([Store].MEMBERS,Len(\"\")=0 "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
/**
 * {@code Len(NULL)} returns 0 (matching SSAS 2005), whether the null comes
 * from a calculated member or is written inline.
 */
public void testLenFunctionWithNullString() {
    // SSAS2005 returns 0
    assertQueryReturns(
        "with member [Measures].[Foo] as ' NULL '\n"
        + " member [Measures].[Bar] as ' len([Measures].[Foo]) '\n"
        + "select [Measures].[Bar] on 0\n"
        + "from [Warehouse and Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Bar]}\n"
        + "Row #0: 0\n");
    // same, but inline
    assertExprReturns("len(null)", 0, 0);
}
public void testUCaseWithNonEmptyString() {
assertQueryReturns(
"select filter([Store].MEMBERS, "
+ " UCase([Store].CURRENTMEMBER.Name) = \"BELLINGHAM\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testUCaseWithEmptyString() {
assertQueryReturns(
"select filter([Store].MEMBERS, "
+ " UCase(\"\") = \"\" "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testInStrFunctionWithValidArguments() {
assertQueryReturns(
"select filter([Store].MEMBERS,InStr(\"Bellingham\", \"ingha\")=5 "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void
testIifFWithBooleanBooleanAndNumericParameterForReturningTruePart()
{
assertQueryReturns(
"SELECT Filter(Store.allmembers, "
+ "iif(measures.profit < 400000,"
+ "[store].currentMember.NAME = \"USA\", 0)) on 0 FROM SALES",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 266,773\n");
}
public void
testIifWithBooleanBooleanAndNumericParameterForReturningFalsePart()
{
assertQueryReturns(
"SELECT Filter([Store].[USA].[CA].[Beverly Hills].children, "
+ "iif(measures.profit > 400000,"
+ "[store].currentMember.NAME = \"USA\", 1)) on 0 FROM SALES",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[CA].[Beverly Hills].[Store 6]}\n"
+ "Row #0: 21,333\n");
}
public void testIIFWithBooleanBooleanAndNumericParameterForReturningZero() {
assertQueryReturns(
"SELECT Filter(Store.allmembers, "
+ "iif(measures.profit > 400000,"
+ "[store].currentMember.NAME = \"USA\", 0)) on 0 FROM SALES",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n");
}
public void testInStrFunctionWithEmptyString1() {
assertQueryReturns(
"select filter([Store].MEMBERS,InStr(\"\", \"ingha\")=0 "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testInStrFunctionWithEmptyString2() {
assertQueryReturns(
"select filter([Store].MEMBERS,InStr(\"Bellingham\", \"\")=1 "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testGetCaptionUsingMemberDotCaption() {
assertQueryReturns(
"SELECT Filter(Store.allmembers, "
+ "[store].currentMember.caption = \"USA\") on 0 FROM SALES",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 266,773\n");
}
/**
 * HTML-escapes {@code s} and writes it to {@code pw}.
 *
 * @param pw Writer to print to
 * @param s  Raw text to escape and emit
 */
private static void printHtml(PrintWriter pw, String s) {
    pw.print(StringEscaper.htmlEscaper.escapeString(s));
}
/**
 * Tests the {@code Cache} function over numeric and string operands,
 * nested inside another expression, and verifies it rejects more than one
 * argument.
 *
 * <p>NOTE(review): the two {@code assertAxisReturns} calls for member-set
 * and tuple operands do not actually wrap the expression in
 * {@code Cache(...)} — verify against upstream whether the wrapper was
 * lost in an edit.
 */
public void testCache() {
    // test various data types: integer, string, member, set, tuple
    assertExprReturns("Cache(1 + 2)", "3");
    assertExprReturns("Cache('foo' || 'bar')", "foobar");
    assertAxisReturns(
        "[Gender].Children",
        "[Gender].[F]\n"
        + "[Gender].[M]");
    assertAxisReturns(
        "([Gender].[M], [Marital Status].[S].PrevMember)",
        "{[Gender].[M], [Marital Status].[M]}");
    // inside another expression
    assertAxisReturns(
        "Order(Cache([Gender].Children), Cache(([Measures].[Unit Sales], [Time].[1997].[Q1])), BDESC)",
        "[Gender].[M]\n"
        + "[Gender].[F]");
    // doesn't work with multiple args
    assertExprThrows(
        "Cache(1, 2)",
        "No function matches signature 'Cache(<Numeric Expression>, <Numeric Expression>)'");
}
// The following methods test VBA functions. They don't test all of them,
// because the raw methods are tested in VbaTest, but they test the core
// functionalities like error handling and operator overloading.
/**
 * {@code Exp} is a simple single-argument VBA function; also checks that
 * a null argument propagates to a null (empty-string) result.
 */
public void testVbaBasic() {
    // Exp is a simple function: one arg.
    assertExprReturns("exp(0)", "1");
    assertExprReturns("exp(1)", Math.E, 0.00000001);
    assertExprReturns("exp(-2)", 1d / (Math.E * Math.E), 0.00000001);
    // If any arg is null, result is null.
    assertExprReturns("exp(cast(null as numeric))", "");
}
// Test a VBA function with variable number of args.
public void testVbaOverloading() {
    // Replace everywhere; replace starting from position 2; replace only
    // the first occurrence, respectively.
    assertExprReturns("replace('xyzxyz', 'xy', 'a')", "azaz");
    assertExprReturns("replace('xyzxyz', 'xy', 'a', 2)", "xyzaz");
    assertExprReturns("replace('xyzxyz', 'xy', 'a', 1, 1)", "azxyz");
}
// Test VBA exception handling
public void testVbaExceptions() {
    // The IBM JVM formats StringIndexOutOfBoundsException messages
    // differently from the Oracle/Sun JVM, hence the two variants.
    assertExprThrows(
        "right(\"abc\", -4)",
        Util.IBM_JVM
        ? "StringIndexOutOfBoundsException: null"
        : "StringIndexOutOfBoundsException: "
        + "String index out of range: -4");
}
/**
 * Tests VBA date/time functions: one that returns a date and one that
 * takes a date parameter.
 */
public void testVbaDateTime() {
    // function which returns date
    assertExprReturns(
        "Format(DateSerial(2006, 4, 29), \"Long Date\")",
        "Saturday, April 29, 2006");
    // function with date parameter
    assertExprReturns("Year(DateSerial(2006, 4, 29))", "2,006");
}
public void testExcelPi() {
    // The PI function is defined in the Excel class.
    // The default numeric format rounds 3.14159... down to "3".
    assertExprReturns("Pi()", "3");
}
/**
 * Tests the Excel {@code Power} function: a fractional exponent, and a
 * negative base with non-integer exponent (yields NaN).
 */
public void testExcelPower() {
    assertExprReturns("Power(8, 0.333333)", 2.0, 0.01);
    assertExprReturns("Power(-2, 0.5)", Double.NaN, 0.001);
}
// Comment from the bug: the reason for this is that in AbstractExpCompiler
// in the compileInteger method we are casting an IntegerCalc into a
// DoubleCalc and there is no check for IntegerCalc in the NumericType
// conditional path.
public void testBug1881739() {
    // LEN returns an integer; LEFT must accept it without a failed cast.
    assertExprReturns("LEFT(\"TEST\", LEN(\"TEST\"))", "TEST");
}
/**
 * Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-296">
 * MONDRIAN-296, "Cube getTimeDimension use when Cube has no Time
 * dimension"</a>.
 *
 * <p>Each time-related function must raise a clear "no time dimension"
 * error against the [Store] cube (which has no time dimension), rather
 * than failing with an NPE.
 */
public void testCubeTimeDimensionFails() {
    assertQueryThrows(
        "select LastPeriods(1) on columns from [Store]",
        "'LastPeriods', no time dimension");
    assertQueryThrows(
        "select OpeningPeriod() on columns from [Store]",
        "'OpeningPeriod', no time dimension");
    assertQueryThrows(
        "select OpeningPeriod([Store Type]) on columns from [Store]",
        "'OpeningPeriod', no time dimension");
    assertQueryThrows(
        "select ClosingPeriod() on columns from [Store]",
        "'ClosingPeriod', no time dimension");
    assertQueryThrows(
        "select ClosingPeriod([Store Type]) on columns from [Store]",
        "'ClosingPeriod', no time dimension");
    assertQueryThrows(
        "select ParallelPeriod() on columns from [Store]",
        "'ParallelPeriod', no time dimension");
    assertQueryThrows(
        "select PeriodsToDate() on columns from [Store]",
        "'PeriodsToDate', no time dimension");
    assertQueryThrows(
        "select Mtd() on columns from [Store]",
        "'Mtd', no time dimension");
}
/**
 * Filter over an empty set returns an empty set rather than an error.
 */
public void testFilterEmpty() {
    // Unlike "Descendants(<set>, ...)", we do not need to know the precise
    // type of the set, therefore it is OK if the set is empty.
    assertAxisReturns(
        "Filter({}, 1=0)",
        "");
    assertAxisReturns(
        "Filter({[Time].[Time].Children}, 1=0)",
        "");
}
public void testFilterCalcSlicer() {
assertQueryReturns(
"with member [Time].[Time].[Date Range] as \n"
+ "'Aggregate({[Time].[1997].[Q1]:[Time].[1997].[Q3]})'\n"
+ "select\n"
+ "{[Measures].[Unit Sales],[Measures].[Store Cost],\n"
+ "[Measures].[Store Sales]} ON columns,\n"
+ "NON EMPTY Filter ([Store].[Store State].members,\n"
+ "[Measures].[Store Cost] > 75000) ON rows\n"
+ "from [Sales] where [Time].[Date Range]",
"Axis #0:\n"
+ "{[Time].[Date Range]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Store Cost]}\n"
+ "{[Measures].[Store Sales]}\n"
+ "Axis #2:\n"
+ "{[Store].[USA].[WA]}\n"
+ "Row #0: 90,131\n"
+ "Row #0: 76,151.59\n"
+ "Row #0: 190,776.88\n");
assertQueryReturns(
"with member [Time].[Time].[Date Range] as \n"
+ "'Aggregate({[Time].[1997].[Q1]:[Time].[1997].[Q3]})'\n"
+ "select\n"
+ "{[Measures].[Unit Sales],[Measures].[Store Cost],\n"
+ "[Measures].[Store Sales]} ON columns,\n"
+ "NON EMPTY Order (Filter ([Store].[Store State].members,\n"
+ "[Measures].[Store Cost] > 100),[Measures].[Store Cost], DESC) ON rows\n"
+ "from [Sales] where [Time].[Date Range]",
"Axis #0:\n"
+ "{[Time].[Date Range]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Store Cost]}\n"
+ "{[Measures].[Store Sales]}\n"
+ "Axis #2:\n"
+ "{[Store].[USA].[WA]}\n"
+ "{[Store].[USA].[CA]}\n"
+ "{[Store].[USA].[OR]}\n"
+ "Row #0: 90,131\n"
+ "Row #0: 76,151.59\n"
+ "Row #0: 190,776.88\n"
+ "Row #1: 53,312\n"
+ "Row #1: 45,435.93\n"
+ "Row #1: 113,966.00\n"
+ "Row #2: 51,306\n"
+ "Row #2: 43,033.82\n"
+ "Row #2: 107,823.63\n");
}
public void testExistsMembersAll() {
assertQueryReturns(
"select exists(\n"
+ " {[Customers].[All Customers],\n"
+ " [Customers].[Country].Members,\n"
+ " [Customers].[State Province].[CA],\n"
+ " [Customers].[Canada].[BC].[Richmond]},\n"
+ " {[Customers].[All Customers]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[Canada]}\n"
+ "{[Customers].[Mexico]}\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[USA].[CA]}\n"
+ "{[Customers].[Canada].[BC].[Richmond]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: \n"
+ "Row #0: \n"
+ "Row #0: 266,773\n"
+ "Row #0: 74,748\n"
+ "Row #0: \n");
}
public void testExistsMembersLevel2() {
assertQueryReturns(
"select exists(\n"
+ " {[Customers].[All Customers],\n"
+ " [Customers].[Country].Members,\n"
+ " [Customers].[State Province].[CA],\n"
+ " [Customers].[Canada].[BC].[Richmond]},\n"
+ " {[Customers].[Country].[USA]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[USA].[CA]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: 266,773\n"
+ "Row #0: 74,748\n");
}
public void testExistsMembersDiffDim() {
assertQueryReturns(
"select exists(\n"
+ " {[Customers].[All Customers],\n"
+ " [Customers].[All Customers].Children,\n"
+ " [Customers].[State Province].Members},\n"
+ " {[Product].Members})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n");
}
public void testExistsMembers2Hierarchies() {
assertQueryReturns(
"select exists(\n"
+ " {[Customers].[All Customers],\n"
+ " [Customers].[All Customers].Children,\n"
+ " [Customers].[State Province].Members,\n"
+ " [Customers].[Country].[Canada],\n"
+ " [Customers].[Country].[Mexico]},\n"
+ " {[Customers].[Country].[USA],\n"
+ " [Customers].[State Province].[Veracruz]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[Mexico]}\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[Mexico].[Veracruz]}\n"
+ "{[Customers].[USA].[CA]}\n"
+ "{[Customers].[USA].[OR]}\n"
+ "{[Customers].[USA].[WA]}\n"
+ "{[Customers].[Mexico]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: \n"
+ "Row #0: 266,773\n"
+ "Row #0: \n"
+ "Row #0: 74,748\n"
+ "Row #0: 67,659\n"
+ "Row #0: 124,366\n"
+ "Row #0: \n");
}
public void testExistsTuplesAll() {
assertQueryReturns(
"select exists(\n"
+ " crossjoin({[Product].[All Products]},{[Customers].[All Customers]}),\n"
+ " {[Customers].[All Customers]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Product].[All Products], [Customers].[All Customers]}\n"
+ "Row #0: 266,773\n");
}
public void testExistsTuplesLevel2() {
assertQueryReturns(
"select exists(\n"
+ " crossjoin({[Product].[All Products]},{[Customers].[All Customers].Children}),\n"
+ " {[Customers].[All Customers].[USA]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Product].[All Products], [Customers].[USA]}\n"
+ "Row #0: 266,773\n");
}
public void testExistsTuplesLevel23() {
assertQueryReturns(
"select exists(\n"
+ " crossjoin({[Customers].[State Province].Members}, {[Product].[All Products]}),\n"
+ " {[Customers].[All Customers].[USA]})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA], [Product].[All Products]}\n"
+ "{[Customers].[USA].[OR], [Product].[All Products]}\n"
+ "{[Customers].[USA].[WA], [Product].[All Products]}\n"
+ "Row #0: 74,748\n"
+ "Row #0: 67,659\n"
+ "Row #0: 124,366\n");
}
public void testExistsTuples2Dim() {
assertQueryReturns(
"select exists(\n"
+ " crossjoin({[Customers].[State Province].Members}, {[Product].[Product Family].Members}),\n"
+ " {([Product].[Product Department].[Dairy],[Customers].[All Customers].[USA])})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA], [Product].[Drink]}\n"
+ "{[Customers].[USA].[OR], [Product].[Drink]}\n"
+ "{[Customers].[USA].[WA], [Product].[Drink]}\n"
+ "Row #0: 7,102\n"
+ "Row #0: 6,106\n"
+ "Row #0: 11,389\n");
}
public void testExistsTuplesDiffDim() {
assertQueryReturns(
"select exists(\n"
+ " crossjoin(\n"
+ " crossjoin({[Customers].[State Province].Members},\n"
+ " {[Time].[Year].[1997]}), \n"
+ " {[Product].[Product Family].Members}),\n"
+ " {([Product].[Product Department].[Dairy],\n"
+ " [Promotions].[All Promotions], \n"
+ " [Customers].[All Customers].[USA])})\n"
+ "on 0 from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n");
}
/**
 * Executes a query that has a complex parse tree. Goal is to find
 * algorithmic complexity bugs in the validator which would make the query
 * run extremely slowly.
 */
public void testComplexQuery() {
    final String expected =
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Gender].[All Gender]}\n"
        + "{[Gender].[F]}\n"
        + "{[Gender].[M]}\n"
        + "Row #0: 266,773\n"
        + "Row #1: 131,558\n"
        + "Row #2: 135,215\n";
    // hand written case
    assertQueryReturns(
        "select\n"
        + " [Measures].[Unit Sales] on 0,\n"
        + " Distinct({\n"
        + " [Gender],\n"
        + " Tail(\n"
        + " Head({\n"
        + " [Gender],\n"
        + " [Gender].[F],\n"
        + " [Gender].[M]},\n"
        + " 2),\n"
        + " 1),\n"
        + " Tail(\n"
        + " Head({\n"
        + " [Gender],\n"
        + " [Gender].[F],\n"
        + " [Gender].[M]},\n"
        + " 2),\n"
        + " 1),\n"
        + " [Gender].[M]}) on 1\n"
        + "from [Sales]", expected);
    // generated equivalent: same result, but produced by a recursive
    // generator (depth 7, breadth 3), which yields a much larger tree.
    StringBuilder buf = new StringBuilder();
    buf.append(
        "select\n"
        + " [Measures].[Unit Sales] on 0,\n");
    generateComplex(buf, " ", 0, 7, 3);
    buf.append(
        " on 1\n"
        + "from [Sales]");
    // (Removed dead "if (false) System.out.println(...)" debug code.)
    assertQueryReturns(buf.toString(), expected);
}
/**
 * Recursive routine to generate a complex MDX expression.
 *
 * @param buf String builder
 * @param indent Indent
 * @param depth Current depth
 * @param depthLimit Max recursion depth
 * @param breadth Number of iterations at each depth
 */
private void generateComplex(
    StringBuilder buf,
    String indent,
    int depth,
    int depthLimit,
    int breadth)
{
    // Chain appends rather than concatenating with '+' inside append():
    // concatenation builds a throwaway String on every call, defeating
    // the purpose of using a StringBuilder.
    buf.append(indent).append("Distinct({\n");
    buf.append(indent).append(" [Gender],\n");
    for (int i = 0; i < breadth; i++) {
        if (depth < depthLimit) {
            buf.append(indent).append(" Tail(\n");
            buf.append(indent).append(" Head({\n");
            generateComplex(
                buf,
                indent + " ",
                depth + 1,
                depthLimit,
                breadth);
            buf.append("},\n");
            buf.append(indent).append(" 2),\n");
            buf.append(indent).append(" 1),\n");
        } else {
            buf.append(indent).append(" [Gender].[F],\n");
        }
    }
    buf.append(indent).append(" [Gender].[M]})");
}
/**
 * Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-1050">
 * MONDRIAN-1050, "MDX Order function fails when using DateTime expression
 * for ordering"</a>.
 *
 * <p>Only checks that the query executes without error; the result is not
 * asserted because {@code Now()} is non-deterministic.
 */
public void testDateParameter() throws Exception {
    executeQuery(
        "SELECT {[Measures].[Unit Sales]} ON COLUMNS, Order([Gender].Members, Now(), ASC) ON ROWS FROM [Sales]");
}
/**
* Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-1043">
* MONDRIAN-1043, "Hierarchize with Except sort set members differently than
* in Mondrian 3.2.1"</a>.
*
* <p>This test makes sure that
* Hierarchize and Except can be used within each other and that the
* sort order is maintained.</p>
*/
public void testHierarchizeExcept() throws Exception {
final String[] mdxA =
new String[] {
"SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS, Hierarchize(Except({[Customers].[USA].Children, [Customers].[USA].[CA].Children}, [Customers].[USA].[CA])) ON ROWS FROM [Sales]",
"SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS, Except(Hierarchize({[Customers].[USA].Children, [Customers].[USA].[CA].Children}), [Customers].[USA].[CA]) ON ROWS FROM [Sales] "
};
for (String mdx : mdxA) {
assertQueryReturns(
mdx,
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Store Sales]}\n"
+ "Axis #2:\n"
+ "{[Customers].[USA].[CA].[Altadena]}\n"
+ "{[Customers].[USA].[CA].[Arcadia]}\n"
+ "{[Customers].[USA].[CA].[Bellflower]}\n"
+ "{[Customers].[USA].[CA].[Berkeley]}\n"
+ "{[Customers].[USA].[CA].[Beverly Hills]}\n"
+ "{[Customers].[USA].[CA].[Burbank]}\n"
+ "{[Customers].[USA].[CA].[Burlingame]}\n"
+ "{[Customers].[USA].[CA].[Chula Vista]}\n"
+ "{[Customers].[USA].[CA].[Colma]}\n"
+ "{[Customers].[USA].[CA].[Concord]}\n"
+ "{[Customers].[USA].[CA].[Coronado]}\n"
+ "{[Customers].[USA].[CA].[Daly City]}\n"
+ "{[Customers].[USA].[CA].[Downey]}\n"
+ "{[Customers].[USA].[CA].[El Cajon]}\n"
+ "{[Customers].[USA].[CA].[Fremont]}\n"
+ "{[Customers].[USA].[CA].[Glendale]}\n"
+ "{[Customers].[USA].[CA].[Grossmont]}\n"
+ "{[Customers].[USA].[CA].[Imperial Beach]}\n"
+ "{[Customers].[USA].[CA].[La Jolla]}\n"
+ "{[Customers].[USA].[CA].[La Mesa]}\n"
+ "{[Customers].[USA].[CA].[Lakewood]}\n"
+ "{[Customers].[USA].[CA].[Lemon Grove]}\n"
+ "{[Customers].[USA].[CA].[Lincoln Acres]}\n"
+ "{[Customers].[USA].[CA].[Long Beach]}\n"
+ "{[Customers].[USA].[CA].[Los Angeles]}\n"
+ "{[Customers].[USA].[CA].[Mill Valley]}\n"
+ "{[Customers].[USA].[CA].[National City]}\n"
+ "{[Customers].[USA].[CA].[Newport Beach]}\n"
+ "{[Customers].[USA].[CA].[Novato]}\n"
+ "{[Customers].[USA].[CA].[Oakland]}\n"
+ "{[Customers].[USA].[CA].[Palo Alto]}\n"
+ "{[Customers].[USA].[CA].[Pomona]}\n"
+ "{[Customers].[USA].[CA].[Redwood City]}\n"
+ "{[Customers].[USA].[CA].[Richmond]}\n"
+ "{[Customers].[USA].[CA].[San Carlos]}\n"
+ "{[Customers].[USA].[CA].[San Diego]}\n"
+ "{[Customers].[USA].[CA].[San Francisco]}\n"
+ "{[Customers].[USA].[CA].[San Gabriel]}\n"
+ "{[Customers].[USA].[CA].[San Jose]}\n"
+ "{[Customers].[USA].[CA].[Santa Cruz]}\n"
+ "{[Customers].[USA].[CA].[Santa Monica]}\n"
+ "{[Customers].[USA].[CA].[Spring Valley]}\n"
+ "{[Customers].[USA].[CA].[Torrance]}\n"
+ "{[Customers].[USA].[CA].[West Covina]}\n"
+ "{[Customers].[USA].[CA].[Woodland Hills]}\n"
+ "{[Customers].[USA].[OR]}\n"
+ "{[Customers].[USA].[WA]}\n"
+ "Row #0: 2,574\n"
+ "Row #0: 5,585.59\n"
+ "Row #1: 2,440\n"
+ "Row #1: 5,136.59\n"
+ "Row #2: 3,106\n"
+ "Row #2: 6,633.97\n"
+ "Row #3: 136\n"
+ "Row #3: 320.17\n"
+ "Row #4: 2,907\n"
+ "Row #4: 6,194.37\n"
+ "Row #5: 3,086\n"
+ "Row #5: 6,577.33\n"
+ "Row #6: 198\n"
+ "Row #6: 407.38\n"
+ "Row #7: 2,999\n"
+ "Row #7: 6,284.30\n"
+ "Row #8: 129\n"
+ "Row #8: 287.78\n"
+ "Row #9: 105\n"
+ "Row #9: 219.77\n"
+ "Row #10: 2,391\n"
+ "Row #10: 5,051.15\n"
+ "Row #11: 129\n"
+ "Row #11: 271.60\n"
+ "Row #12: 3,440\n"
+ "Row #12: 7,367.06\n"
+ "Row #13: 2,543\n"
+ "Row #13: 5,460.42\n"
+ "Row #14: 163\n"
+ "Row #14: 350.22\n"
+ "Row #15: 3,284\n"
+ "Row #15: 7,082.91\n"
+ "Row #16: 2,131\n"
+ "Row #16: 4,458.60\n"
+ "Row #17: 1,616\n"
+ "Row #17: 3,409.34\n"
+ "Row #18: 1,938\n"
+ "Row #18: 4,081.37\n"
+ "Row #19: 1,834\n"
+ "Row #19: 3,908.26\n"
+ "Row #20: 2,487\n"
+ "Row #20: 5,174.12\n"
+ "Row #21: 2,651\n"
+ "Row #21: 5,636.82\n"
+ "Row #22: 2,176\n"
+ "Row #22: 4,691.94\n"
+ "Row #23: 2,973\n"
+ "Row #23: 6,422.37\n"
+ "Row #24: 2,009\n"
+ "Row #24: 4,312.99\n"
+ "Row #25: 58\n"
+ "Row #25: 109.36\n"
+ "Row #26: 2,031\n"
+ "Row #26: 4,237.46\n"
+ "Row #27: 3,098\n"
+ "Row #27: 6,696.06\n"
+ "Row #28: 163\n"
+ "Row #28: 335.98\n"
+ "Row #29: 70\n"
+ "Row #29: 145.90\n"
+ "Row #30: 133\n"
+ "Row #30: 272.08\n"
+ "Row #31: 2,712\n"
+ "Row #31: 5,595.62\n"
+ "Row #32: 144\n"
+ "Row #32: 312.43\n"
+ "Row #33: 110\n"
+ "Row #33: 212.45\n"
+ "Row #34: 145\n"
+ "Row #34: 289.80\n"
+ "Row #35: 1,535\n"
+ "Row #35: 3,348.69\n"
+ "Row #36: 88\n"
+ "Row #36: 195.28\n"
+ "Row #37: 2,631\n"
+ "Row #37: 5,663.60\n"
+ "Row #38: 161\n"
+ "Row #38: 343.20\n"
+ "Row #39: 185\n"
+ "Row #39: 367.78\n"
+ "Row #40: 2,660\n"
+ "Row #40: 5,739.63\n"
+ "Row #41: 1,790\n"
+ "Row #41: 3,862.79\n"
+ "Row #42: 2,570\n"
+ "Row #42: 5,405.02\n"
+ "Row #43: 2,503\n"
+ "Row #43: 5,302.08\n"
+ "Row #44: 2,516\n"
+ "Row #44: 5,406.21\n"
+ "Row #45: 67,659\n"
+ "Row #45: 142,277.07\n"
+ "Row #46: 124,366\n"
+ "Row #46: 263,793.22\n");
}
}
}
// End FunctionTest.java
|
testsrc/main/mondrian/olap/fun/FunctionTest.java
|
/*
// This software is subject to the terms of the Eclipse Public License v1.0
// Agreement, available at the following URL:
// http://www.eclipse.org/legal/epl-v10.html.
// You must accept the terms of that agreement to use this software.
//
// Copyright (C) 2003-2005 Julian Hyde
// Copyright (C) 2005-2012 Pentaho and others
// All Rights Reserved.
*/
package mondrian.olap.fun;
import mondrian.olap.*;
import mondrian.resource.MondrianResource;
import mondrian.test.FoodMartTestCase;
import mondrian.test.TestContext;
import mondrian.udf.*;
import mondrian.util.Bug;
import junit.framework.Assert;
import junit.framework.ComparisonFailure;
import org.apache.log4j.Logger;
import org.eigenbase.xom.StringEscaper;
import java.io.*;
import java.util.*;
/**
* <code>FunctionTest</code> tests the functions defined in
* {@link BuiltinFunTable}.
*
* @author gjohnson
*/
public class FunctionTest extends FoodMartTestCase {
private static final Logger LOGGER = Logger.getLogger(FunctionTest.class);
private static final String months =
"[Time].[1997].[Q1].[1]\n"
+ "[Time].[1997].[Q1].[2]\n"
+ "[Time].[1997].[Q1].[3]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]\n"
+ "[Time].[1997].[Q3].[7]\n"
+ "[Time].[1997].[Q3].[8]\n"
+ "[Time].[1997].[Q3].[9]\n"
+ "[Time].[1997].[Q4].[10]\n"
+ "[Time].[1997].[Q4].[11]\n"
+ "[Time].[1997].[Q4].[12]";
private static final String quarters =
"[Time].[1997].[Q1]\n"
+ "[Time].[1997].[Q2]\n"
+ "[Time].[1997].[Q3]\n"
+ "[Time].[1997].[Q4]";
private static final String year1997 = "[Time].[1997]";
private static final String hierarchized1997 =
year1997
+ "\n"
+ "[Time].[1997].[Q1]\n"
+ "[Time].[1997].[Q1].[1]\n"
+ "[Time].[1997].[Q1].[2]\n"
+ "[Time].[1997].[Q1].[3]\n"
+ "[Time].[1997].[Q2]\n"
+ "[Time].[1997].[Q2].[4]\n"
+ "[Time].[1997].[Q2].[5]\n"
+ "[Time].[1997].[Q2].[6]\n"
+ "[Time].[1997].[Q3]\n"
+ "[Time].[1997].[Q3].[7]\n"
+ "[Time].[1997].[Q3].[8]\n"
+ "[Time].[1997].[Q3].[9]\n"
+ "[Time].[1997].[Q4]\n"
+ "[Time].[1997].[Q4].[10]\n"
+ "[Time].[1997].[Q4].[11]\n"
+ "[Time].[1997].[Q4].[12]";
private static final String NullNumericExpr =
" ([Measures].[Unit Sales],"
+ " [Customers].[All Customers].[USA].[CA].[Bellflower], "
+ " [Product].[All Products].[Drink].[Alcoholic Beverages]."
+ "[Beer and Wine].[Beer].[Good].[Good Imported Beer])";
private static final String TimeWeekly =
MondrianProperties.instance().SsasCompatibleNaming.get()
? "[Time].[Weekly]"
: "[Time.Weekly]";
// ~ Constructors ----------------------------------------------------------
/**
 * Creates a FunctionTest.
 */
public FunctionTest() {
}
/**
 * Creates a FunctionTest with an explicit name.
 *
 * @param s Test name
 */
public FunctionTest(String s) {
super(s);
}
// ~ Methods ---------------------------------------------------------------
// ~ Test methods ----------------------------------------------------------
/**
 * Tests that Integer.MIN_VALUE (-2147483648) as the lag argument of
 * ParallelPeriod does not cause NPE.
 */
public void testParallelPeriodMinValue() {
// Only asserts that the query executes without throwing.
executeQuery(
"with "
+ "member [measures].[foo] as "
+ "'([Measures].[unit sales],"
+ "ParallelPeriod([Time].[Quarter], -2147483648))' "
+ "select "
+ "[measures].[foo] on columns, "
+ "[time].[1997].children on rows "
+ "from [sales]");
}
/**
 * Tests that Integer.MIN_VALUE (-2147483648) in Lag is handled correctly.
 */
public void testLagMinValue() {
// Only asserts that the query executes without throwing.
executeQuery(
"with "
+ "member [measures].[foo] as "
+ "'([Measures].[unit sales], [Time].[1997].[Q1].Lag(-2147483648))' "
+ "select "
+ "[measures].[foo] on columns, "
+ "[time].[1997].children on rows "
+ "from [sales]");
}
/**
 * Tests that ParallelPeriod with Aggregate function works: the slicer member
 * is a calculated aggregate over products, and the second measure shifts the
 * time context back one quarter.
 */
public void testParallelPeriodWithSlicer() {
assertQueryReturns(
"With "
+ "Set [*NATIVE_CJ_SET] as 'NonEmptyCrossJoin([*BASE_MEMBERS_Time],[*BASE_MEMBERS_Product])' "
+ "Set [*BASE_MEMBERS_Measures] as '{[Measures].[*FORMATTED_MEASURE_0], [Measures].[*FORMATTED_MEASURE_1]}' "
+ "Set [*BASE_MEMBERS_Time] as '{[Time].[1997].[Q2].[6]}' "
+ "Set [*NATIVE_MEMBERS_Time] as 'Generate([*NATIVE_CJ_SET], {[Time].[Time].CurrentMember})' "
+ "Set [*BASE_MEMBERS_Product] as '{[Product].[All Products].[Drink],[Product].[All Products].[Food]}' "
+ "Set [*NATIVE_MEMBERS_Product] as 'Generate([*NATIVE_CJ_SET], {[Product].CurrentMember})' "
+ "Member [Measures].[*FORMATTED_MEASURE_0] as '[Measures].[Customer Count]', FORMAT_STRING = '#,##0', SOLVE_ORDER=400 "
+ "Member [Measures].[*FORMATTED_MEASURE_1] as "
+ "'([Measures].[Customer Count], ParallelPeriod([Time].[Quarter], 1, [Time].[Time].currentMember))', FORMAT_STRING = '#,##0', SOLVE_ORDER=-200 "
+ "Member [Product].[*FILTER_MEMBER] as 'Aggregate ([*NATIVE_MEMBERS_Product])', SOLVE_ORDER=-300 "
+ "Select "
+ "[*BASE_MEMBERS_Measures] on columns, Non Empty Generate([*NATIVE_CJ_SET], {([Time].[Time].CurrentMember)}) on rows "
+ "From [Sales] "
+ "Where ([Product].[*FILTER_MEMBER])",
"Axis #0:\n"
+ "{[Product].[*FILTER_MEMBER]}\n"
+ "Axis #1:\n"
+ "{[Measures].[*FORMATTED_MEASURE_0]}\n"
+ "{[Measures].[*FORMATTED_MEASURE_1]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q2].[6]}\n"
+ "Row #0: 1,314\n"
+ "Row #0: 1,447\n");
}
/**
 * Tests ParallelPeriod when its level argument comes from the
 * Levels(&lt;string&gt;) function rather than a level literal.
 */
public void testParallelperiodOnLevelsString() {
assertQueryReturns(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(Levels(\"[Time].[Month]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Axis #0:\n"
+ "{[Time].[1997].[Q2].[5]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Prev Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Gender].[All Gender]}\n"
+ "{[Gender].[F]}\n"
+ "{[Gender].[M]}\n"
+ "Row #0: 21,081\n"
+ "Row #0: 20,179\n"
+ "Row #1: 10,536\n"
+ "Row #1: 9,990\n"
+ "Row #2: 10,545\n"
+ "Row #2: 10,189\n");
}
/**
 * Tests ParallelPeriod when its member argument comes from StrToMember;
 * also verifies the error raised when the string names a level, not a member.
 */
public void testParallelperiodOnStrToMember() {
assertQueryReturns(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(strToMember(\"[Time].[1997].[Q2]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Axis #0:\n"
+ "{[Time].[1997].[Q2].[5]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Prev Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Gender].[All Gender]}\n"
+ "{[Gender].[F]}\n"
+ "{[Gender].[M]}\n"
+ "Row #0: 21,081\n"
+ "Row #0: 20,957\n"
+ "Row #1: 10,536\n"
+ "Row #1: 10,266\n"
+ "Row #2: 10,545\n"
+ "Row #2: 10,691\n");
// A level name is not a member: StrToMember must reject it.
assertQueryThrows(
"with member Measures.[Prev Unit Sales] as 'parallelperiod(strToMember(\"[Time].[Quarter]\"))'\n"
+ "select {[Measures].[Unit Sales], Measures.[Prev Unit Sales]} ON COLUMNS,\n"
+ "[Gender].members ON ROWS\n"
+ "from [Sales]\n"
+ "where [Time].[1997].[Q2].[5]",
"Cannot find MDX member '[Time].[Quarter]'. Make sure it is indeed a member and not a level or a hierarchy.");
}
/** Tests evaluation and formatting of numeric literals. */
public void testNumericLiteral() {
assertExprReturns("2", "2");
if (false) {
// The test is currently broken because the value 2.5 is formatted
// as "2". TODO: better default format string
assertExprReturns("2.5", "2.5");
}
assertExprReturns("-10.0", "-10");
// Literals depend on no hierarchies.
getTestContext().assertExprDependsOn("1.5", "{}");
}
/** Tests evaluation of string literals (double-quoted form). */
public void testStringLiteral() {
// single-quoted string
if (false) {
// TODO: enhance parser so that you can include a quoted string
// inside a WITH MEMBER clause
assertExprReturns("'foobar'", "foobar");
}
// double-quoted string
assertExprReturns("\"foobar\"", "foobar");
// literals don't depend on any dimensions
getTestContext().assertExprDependsOn("\"foobar\"", "{}");
}
/** Tests the &lt;Hierarchy&gt;.Dimension property applied to a hierarchy. */
public void testDimensionHierarchy() {
assertExprReturns("[Time].Dimension.Name", "Time");
}
/** Tests the &lt;Level&gt;.Dimension property. */
public void testLevelDimension() {
assertExprReturns("[Time].[Year].Dimension.UniqueName", "[Time]");
}
/** Tests the &lt;Member&gt;.Dimension property. */
public void testMemberDimension() {
assertExprReturns("[Time].[1997].[Q2].Dimension.UniqueName", "[Time]");
}
/** Tests the Dimensions(&lt;numeric&gt;) function, including bounds checking. */
public void testDimensionsNumeric() {
getTestContext().assertExprDependsOn("Dimensions(2).Name", "{}");
// CurrentMember of the returned hierarchy depends on all hierarchies.
getTestContext().assertMemberExprDependsOn(
"Dimensions(3).CurrentMember",
TestContext.allHiers());
assertExprReturns("Dimensions(2).Name", "Store Size in SQFT");
// bug 1426134 -- Dimensions(0) throws 'Index '0' out of bounds'
assertExprReturns("Dimensions(0).Name", "Measures");
assertExprThrows("Dimensions(-1).Name", "Index '-1' out of bounds");
assertExprThrows("Dimensions(100).Name", "Index '100' out of bounds");
// Since Dimensions returns a Hierarchy, can apply CurrentMember.
assertAxisReturns(
"Dimensions(3).CurrentMember",
"[Store Type].[All Store Types]");
}
/** Tests the Dimensions(&lt;string&gt;) function. */
public void testDimensionsString() {
getTestContext().assertExprDependsOn(
"Dimensions(\"foo\").UniqueName",
"{}");
getTestContext().assertMemberExprDependsOn(
"Dimensions(\"foo\").CurrentMember", TestContext.allHiers());
assertExprReturns("Dimensions(\"Store\").UniqueName", "[Store]");
// Since Dimensions returns a Hierarchy, can apply Children.
assertAxisReturns(
"Dimensions(\"Store\").Children",
"[Store].[Canada]\n"
+ "[Store].[Mexico]\n"
+ "[Store].[USA]");
}
/** Tests the dependency set reported for expressions built on Dimensions(). */
public void testDimensionsDepends() {
final String expression =
"Crossjoin("
+ "{Dimensions(\"Measures\").CurrentMember.Hierarchy.CurrentMember}, "
+ "{Dimensions(\"Product\")})";
assertAxisReturns(
expression, "{[Measures].[Unit Sales], [Product].[All Products]}");
getTestContext().assertSetExprDependsOn(
expression, TestContext.allHiers());
}
/** Tests the &lt;Member&gt;.Hierarchy property on a Time member. */
public void testTime() {
assertExprReturns(
"[Time].[1997].[Q1].[1].Hierarchy.UniqueName", "[Time]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on a Gender member. */
public void testBasic9() {
assertExprReturns(
"[Gender].[All Gender].[F].Hierarchy.UniqueName", "[Gender]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on an Education Level member. */
public void testFirstInLevel9() {
assertExprReturns(
"[Education Level].[All Education Levels].[Bachelors Degree].Hierarchy.UniqueName",
"[Education Level]");
}
/** Tests the &lt;Member&gt;.Hierarchy property on an 'all' member. */
public void testHierarchyAll() {
assertExprReturns(
"[Gender].[All Gender].Hierarchy.UniqueName", "[Gender]");
}
/**
 * Tests properties applied to the null member (the parent of an 'all'
 * member); Mondrian deliberately diverges from MSAS in several cases.
 */
public void testNullMember() {
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Level.UniqueName",
"[Gender].[(All)]");
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Hierarchy.UniqueName", "[Gender]");
// MSAS fails here, but Mondrian doesn't.
assertExprReturns(
"[Gender].[All Gender].Parent.Dimension.UniqueName", "[Gender]");
// MSAS succeeds too
assertExprReturns(
"[Gender].[All Gender].Parent.Children.Count", "0");
if (isDefaultNullMemberRepresentation()) {
// MSAS returns "" here.
assertExprReturns(
"[Gender].[All Gender].Parent.UniqueName", "[Gender].[#null]");
// MSAS returns "" here.
assertExprReturns(
"[Gender].[All Gender].Parent.Name", "#null");
}
}
/**
 * Tests use of NULL literal to generate a null cell value.
 * Testcase is from bug 1440344.
 */
public void testNullValue() {
// IIF returns NULL for departments with sales <= 10000; those rows render
// as empty cells in the expected result.
assertQueryReturns(
"with member [Measures].[X] as 'IIF([Measures].[Store Sales]>10000,[Measures].[Store Sales],Null)'\n"
+ "select\n"
+ "{[Measures].[X]} on columns,\n"
+ "{[Product].[Product Department].members} on rows\n"
+ "from Sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[X]}\n"
+ "Axis #2:\n"
+ "{[Product].[Drink].[Alcoholic Beverages]}\n"
+ "{[Product].[Drink].[Beverages]}\n"
+ "{[Product].[Drink].[Dairy]}\n"
+ "{[Product].[Food].[Baked Goods]}\n"
+ "{[Product].[Food].[Baking Goods]}\n"
+ "{[Product].[Food].[Breakfast Foods]}\n"
+ "{[Product].[Food].[Canned Foods]}\n"
+ "{[Product].[Food].[Canned Products]}\n"
+ "{[Product].[Food].[Dairy]}\n"
+ "{[Product].[Food].[Deli]}\n"
+ "{[Product].[Food].[Eggs]}\n"
+ "{[Product].[Food].[Frozen Foods]}\n"
+ "{[Product].[Food].[Meat]}\n"
+ "{[Product].[Food].[Produce]}\n"
+ "{[Product].[Food].[Seafood]}\n"
+ "{[Product].[Food].[Snack Foods]}\n"
+ "{[Product].[Food].[Snacks]}\n"
+ "{[Product].[Food].[Starchy Foods]}\n"
+ "{[Product].[Non-Consumable].[Carousel]}\n"
+ "{[Product].[Non-Consumable].[Checkout]}\n"
+ "{[Product].[Non-Consumable].[Health and Hygiene]}\n"
+ "{[Product].[Non-Consumable].[Household]}\n"
+ "{[Product].[Non-Consumable].[Periodicals]}\n"
+ "Row #0: 14,029.08\n"
+ "Row #1: 27,748.53\n"
+ "Row #2: \n"
+ "Row #3: 16,455.43\n"
+ "Row #4: 38,670.41\n"
+ "Row #5: \n"
+ "Row #6: 39,774.34\n"
+ "Row #7: \n"
+ "Row #8: 30,508.85\n"
+ "Row #9: 25,318.93\n"
+ "Row #10: \n"
+ "Row #11: 55,207.50\n"
+ "Row #12: \n"
+ "Row #13: 82,248.42\n"
+ "Row #14: \n"
+ "Row #15: 67,609.82\n"
+ "Row #16: 14,550.05\n"
+ "Row #17: 11,756.07\n"
+ "Row #18: \n"
+ "Row #19: \n"
+ "Row #20: 32,571.86\n"
+ "Row #21: 60,469.89\n"
+ "Row #22: \n");
}
/** Tests that NULL propagates (yields empty) through multiplication. */
public void testNullInMultiplication() {
assertExprReturns("NULL*1", "");
assertExprReturns("1*NULL", "");
assertExprReturns("NULL*NULL", "");
}
/** Tests that NULL is treated as zero in addition. */
public void testNullInAddition() {
assertExprReturns("1+NULL", "1");
assertExprReturns("NULL+1", "1");
}
/** Tests that NULL is treated as zero in subtraction. */
public void testNullInSubtraction() {
assertExprReturns("1-NULL", "1");
assertExprReturns("NULL-1", "-1");
}
public void testMemberLevel() {
assertExprReturns(
"[Time].[1997].[Q1].[1].Level.UniqueName",
"[Time].[Month]");
}
public void testLevelsNumeric() {
assertExprReturns("[Time].[Time].Levels(2).Name", "Month");
assertExprReturns("[Time].[Time].Levels(0).Name", "Year");
assertExprReturns("[Product].Levels(0).Name", "(All)");
}
public void testLevelsTooSmall() {
assertExprThrows(
"[Time].[Time].Levels(-1).Name", "Index '-1' out of bounds");
}
public void testLevelsTooLarge() {
assertExprThrows(
"[Time].[Time].Levels(8).Name", "Index '8' out of bounds");
}
public void testHierarchyLevelsString() {
assertExprReturns(
"[Time].[Time].Levels(\"Year\").UniqueName", "[Time].[Year]");
}
public void testHierarchyLevelsStringFail() {
assertExprThrows(
"[Time].[Time].Levels(\"nonexistent\").UniqueName",
"Level 'nonexistent' not found in hierarchy '[Time]'");
}
public void testLevelsString() {
assertExprReturns(
"Levels(\"[Time].[Year]\").UniqueName",
"[Time].[Year]");
}
public void testLevelsStringFail() {
assertExprThrows(
"Levels(\"nonexistent\").UniqueName",
"Level 'nonexistent' not found");
}
public void testIsEmptyQuery() {
String desiredResult =
"Axis #0:\n"
+ "{[Time].[1997].[Q4].[12], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer], [Measures].[Foo]}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "{[Store].[USA].[WA].[Bremerton]}\n"
+ "{[Store].[USA].[WA].[Seattle]}\n"
+ "{[Store].[USA].[WA].[Spokane]}\n"
+ "{[Store].[USA].[WA].[Tacoma]}\n"
+ "{[Store].[USA].[WA].[Walla Walla]}\n"
+ "{[Store].[USA].[WA].[Yakima]}\n"
+ "Row #0: 5\n"
+ "Row #0: 5\n"
+ "Row #0: 2\n"
+ "Row #0: 5\n"
+ "Row #0: 11\n"
+ "Row #0: 5\n"
+ "Row #0: 4\n";
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif(IsEmpty([Measures].[Unit Sales]), 5, [Measures].[Unit Sales])'\n"
+ "SELECT {[Store].[USA].[WA].children} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1997].[Q4].[12],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Measures].[Foo])",
desiredResult);
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif([Measures].[Unit Sales] IS EMPTY, 5, [Measures].[Unit Sales])'\n"
+ "SELECT {[Store].[USA].[WA].children} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1997].[Q4].[12],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Measures].[Foo])",
desiredResult);
assertQueryReturns(
"WITH MEMBER [Measures].[Foo] AS 'Iif([Measures].[Bar] IS EMPTY, 1, [Measures].[Bar])'\n"
+ "MEMBER [Measures].[Bar] AS 'CAST(\"42\" AS INTEGER)'\n"
+ "SELECT {[Measures].[Unit Sales], [Measures].[Foo]} on columns\n"
+ "FROM Sales\n"
+ "WHERE ([Time].[1998].[Q4].[12])",
"Axis #0:\n"
+ "{[Time].[1998].[Q4].[12]}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Foo]}\n"
+ "Row #0: \n"
+ "Row #0: 42\n");
}
public void testIsEmptyWithAggregate() {
assertQueryReturns(
"WITH MEMBER [gender].[foo] AS 'isEmpty(Aggregate({[Gender].m}))' "
+ "SELECT {Gender.foo} on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Gender].[foo]}\n"
+ "Row #0: false\n");
}
public void testIsEmpty()
{
assertBooleanExprReturns("[Gender].[All Gender].Parent IS NULL", true);
// Any functions that return a member from parameters that
// include a member and that member is NULL also give a NULL.
// Not a runtime exception.
assertBooleanExprReturns(
"[Gender].CurrentMember.Parent.NextMember IS NULL",
true);
if (!Bug.BugMondrian207Fixed) {
return;
}
// When resolving a tuple's value in the cube, if there is
// at least one NULL member in the tuple should return a
// NULL cell value.
assertBooleanExprReturns(
"IsEmpty(([Time].currentMember.Parent, [Measures].[Unit Sales]))",
false);
assertBooleanExprReturns(
"IsEmpty(([Time].currentMember, [Measures].[Unit Sales]))",
false);
// EMPTY refers to a genuine cell value that exists in the cube space,
// and has no NULL members in the tuple,
// but has no fact data at that crossing,
// so it evaluates to EMPTY as a cell value.
assertBooleanExprReturns(
"IsEmpty(\n"
+ " ([Time].[1997].[Q4].[12],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Store].[All Stores].[USA].[WA].[Bellingham]))", true);
assertBooleanExprReturns(
"IsEmpty(\n"
+ " ([Time].[1997].[Q4].[11],\n"
+ " [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth].[Portsmouth Imported Beer],\n"
+ " [Store].[All Stores].[USA].[WA].[Bellingham]))", false);
// The empty set is neither EMPTY nor NULL.
// should give 0 as a result, not NULL and not EMPTY.
assertQueryReturns(
"WITH SET [empty set] AS '{}'\n"
+ " MEMBER [Measures].[Set Size] AS 'Count([empty set])'\n"
+ " MEMBER [Measures].[Set Size Is Empty] AS 'CASE WHEN IsEmpty([Measures].[Set Size]) THEN 1 ELSE 0 END '\n"
+ "SELECT [Measures].[Set Size] on columns", "");
assertQueryReturns(
"WITH SET [empty set] AS '{}'\n"
+ "WITH MEMBER [Measures].[Set Size] AS 'Count([empty set])'\n"
+ "SELECT [Measures].[Set Size] on columns", "");
// Run time errors are BAD things. They should not occur
// in almost all cases. In fact there should be no
// logically formed MDX that generates them. An ERROR
// value in a cell though is perfectly legal - e.g. a
// divide by 0.
// E.g.
String foo =
"WITH [Measures].[Ratio This Period to Previous] as\n"
+ "'([Measures].[Sales],[Time].CurrentMember/([Measures].[Sales],[Time].CurrentMember.PrevMember)'\n"
+ "SELECT [Measures].[Ratio This Period to Previous] ON COLUMNS,\n"
+ "[Time].Members ON ROWS\n"
+ "FROM ...";
// For the [Time].[All Time] row as well as the first
// year, first month etc, the PrevMember will evaluate to
// NULL, the tuple will evaluate to NULL and the division
// will implicitly convert the NULL to 0 and then evaluate
// to an ERROR value due to a divide by 0.
// This leads to another point: NULL and EMPTY values get
// implicitly converted to 0 when treated as numeric
// values for division and multiplication but for addition
// and subtraction, NULL is treated as NULL (5+NULL yields
// NULL).
// I have no idea about how EMPTY works. I.e. is does
// 5+EMPTY yield 5 or EMPTY or NULL or what?
// E.g.
String foo2 =
"WITH MEMBER [Measures].[5 plus empty] AS\n"
+ "'5+([Product].[All Products].[Ski boots],[Geography].[All Geography].[Hawaii])'\n"
+ "SELECT [Measures].[5 plus empty] ON COLUMNS\n"
+ "FROM ...";
// Does this yield EMPTY, 5, NULL or ERROR?
// Lastly, IS NULL and IS EMPTY are both legal and
// distinct. <<Object>> IS {<<Object>> | NULL} and
// <<Value>> IS EMPTY.
// E.g.
// a) [Time].CurrentMember.Parent IS [Time].[Year].[2004]
// is also a perfectly legal expression and better than
// [Time].CurrentMember.Parent.Name="2004".
// b) ([Measures].[Sales],[Time].FirstSibling) IS EMPTY is
// a legal expression.
// Microsoft's site says that the EMPTY value participates in 3 value
// logic e.g. TRUE AND EMPTY gives EMPTY, FALSE AND EMPTY gives FALSE.
// todo: test for this
}
public void testQueryWithoutValidMeasure() {
assertQueryReturns(
"with\n"
+ "member measures.[without VM] as ' [measures].[unit sales] '\n"
+ "select {measures.[without VM] } on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[without VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: \n"
+ "Row #1: \n"
+ "Row #2: \n");
}
/** Tests the <code>ValidMeasure</code> function. */
public void testValidMeasure() {
assertQueryReturns(
"with\n"
+ "member measures.[with VM] as 'validmeasure([measures].[unit sales])'\n"
+ "select { measures.[with VM]} on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[with VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: 266,773\n"
+ "Row #2: 266,773\n");
}
public void _testValidMeasureNonEmpty() {
// Note that [with VM2] is NULL where it needs to be - and therefore
// does not prevent NON EMPTY from eliminating empty rows.
assertQueryReturns(
"with set [Foo] as ' Crossjoin({[Time].Children}, {[Measures].[Warehouse Sales]}) '\n"
+ " member [Measures].[with VM] as 'ValidMeasure([Measures].[Unit Sales])'\n"
+ " member [Measures].[with VM2] as 'Iif(Count(Filter([Foo], not isempty([Measures].CurrentMember))) > 0, ValidMeasure([Measures].[Unit Sales]), NULL)'\n"
+ "select NON EMPTY Crossjoin({[Time].Children}, {[Measures].[with VM2], [Measures].[Warehouse Sales]}) ON COLUMNS,\n"
+ " NON EMPTY {[Warehouse].[All Warehouses].[USA].[WA].Children} ON ROWS\n"
+ "from [Warehouse and Sales]\n"
+ "where [Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]",
"Axis #0:\n"
+ "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "Axis #1:\n"
+ "{[Time].[1997].[Q1], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q1], [Measures].[Warehouse Sales]}\n"
+ "{[Time].[1997].[Q2], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q2], [Measures].[Warehouse Sales]}\n"
+ "{[Time].[1997].[Q3], [Measures].[with VM2]}\n"
+ "{[Time].[1997].[Q4], [Measures].[with VM2]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[USA].[WA].[Seattle]}\n"
+ "{[Warehouse].[USA].[WA].[Tacoma]}\n"
+ "{[Warehouse].[USA].[WA].[Yakima]}\n"
+ "Row #0: 26\n"
+ "Row #0: 34.793\n"
+ "Row #0: 25\n"
+ "Row #0: \n"
+ "Row #0: 36\n"
+ "Row #0: 28\n"
+ "Row #1: 26\n"
+ "Row #1: \n"
+ "Row #1: 25\n"
+ "Row #1: 64.615\n"
+ "Row #1: 36\n"
+ "Row #1: 28\n"
+ "Row #2: 26\n"
+ "Row #2: 79.657\n"
+ "Row #2: 25\n"
+ "Row #2: \n"
+ "Row #2: 36\n"
+ "Row #2: 28\n");
}
public void testValidMeasureTupleHasAnotherMember() {
assertQueryReturns(
"with\n"
+ "member measures.[with VM] as 'validmeasure(([measures].[unit sales],[customers].[all customers]))'\n"
+ "select { measures.[with VM]} on 0,\n"
+ "[Warehouse].[Country].members on 1 from [warehouse and sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[with VM]}\n"
+ "Axis #2:\n"
+ "{[Warehouse].[Canada]}\n"
+ "{[Warehouse].[Mexico]}\n"
+ "{[Warehouse].[USA]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: 266,773\n"
+ "Row #2: 266,773\n");
}
public void testValidMeasureDepends() {
String s12 = TestContext.allHiersExcept("[Measures]");
getTestContext().assertExprDependsOn(
"ValidMeasure([Measures].[Unit Sales])", s12);
String s11 = TestContext.allHiersExcept("[Measures]", "[Time]");
getTestContext().assertExprDependsOn(
"ValidMeasure(([Measures].[Unit Sales], [Time].[1997].[Q1]))", s11);
String s1 = TestContext.allHiersExcept("[Measures]");
getTestContext().assertExprDependsOn(
"ValidMeasure(([Measures].[Unit Sales], "
+ "[Time].[Time].CurrentMember.Parent))",
s1);
}
public void testAncestor() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA].[Los Angeles],[Store Country])");
Assert.assertEquals("USA", member.getName());
assertAxisThrows(
"Ancestor([Store].[USA].[CA].[Los Angeles],[Promotions].[Promotion Name])",
"Error while executing query");
}
public void testAncestorNumeric() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA].[Los Angeles],1)");
Assert.assertEquals("CA", member.getName());
member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA].[Los Angeles], 0)");
Assert.assertEquals("Los Angeles", member.getName());
final TestContext testContextRagged =
getTestContext().withCube("[Sales Ragged]");
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], 1)");
Assert.assertEquals("All Stores", member.getName());
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[USA].[Washington], 1)");
Assert.assertEquals("USA", member.getName());
// complicated way to say "1".
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[USA].[Washington], 7 * 6 - 41)");
Assert.assertEquals("USA", member.getName());
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], 2)");
Assert.assertNull("Ancestor at 2 must be null", member);
member =
testContextRagged.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Vatican], -5)");
Assert.assertNull("Ancestor at -5 must be null", member);
}
public void testAncestorHigher() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA],[Store].[Store City])");
Assert.assertNull(member); // MSOLAP returns null
}
public void testAncestorSameLevel() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[Canada],[Store].[Store Country])");
Assert.assertEquals("Canada", member.getName());
}
public void testAncestorWrongHierarchy() {
// MSOLAP gives error "Formula error - dimensions are not
// valid (they do not match) - in the Ancestor function"
assertAxisThrows(
"Ancestor([Gender].[M],[Store].[Store Country])",
"Error while executing query");
}
public void testAncestorAllLevel() {
Member member =
executeSingletonAxis(
"Ancestor([Store].[USA].[CA],[Store].Levels(0))");
Assert.assertTrue(member.isAll());
}
public void testAncestorWithHiddenParent() {
final TestContext testContext =
getTestContext().withCube("[Sales Ragged]");
Member member =
testContext.executeSingletonAxis(
"Ancestor([Store].[All Stores].[Israel].[Haifa], "
+ "[Store].[Store Country])");
assertNotNull("Member must not be null.", member);
Assert.assertEquals("Israel", member.getName());
}
public void testAncestorDepends() {
getTestContext().assertExprDependsOn(
"Ancestor([Store].CurrentMember, [Store].[Store Country]).Name",
"{[Store]}");
getTestContext().assertExprDependsOn(
"Ancestor([Store].[All Stores].[USA], "
+ "[Store].CurrentMember.Level).Name",
"{[Store]}");
getTestContext().assertExprDependsOn(
"Ancestor([Store].[All Stores].[USA], "
+ "[Store].[Store Country]).Name",
"{}");
getTestContext().assertExprDependsOn(
"Ancestor([Store].CurrentMember, 2+1).Name", "{[Store]}");
}
public void testAncestors() {
// Test that we can execute Ancestors by passing a level as
// the depth argument (PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], [Employees].[All Employees].Level)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply]}\n"
+ "{[Employees].[Sheri Nowmer]}\n"
+ "{[Employees].[All Employees]}\n"
+ "Row #0: $984.45\n"
+ "Row #0: $3,426.54\n"
+ "Row #0: $3,610.14\n"
+ "Row #0: $17,099.20\n"
+ "Row #0: $36,494.07\n"
+ "Row #0: $39,431.67\n"
+ "Row #0: $39,431.67\n");
// Test that we can execute Ancestors by passing a level as
// the depth argument (non PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Store].[USA].[CA].[Los Angeles], [Store].[Store Country])'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[CA]}\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 74,748\n"
+ "Row #0: 266,773\n");
// Test that we can execute Ancestors by passing an integer as
// the depth argument (PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], 3)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds]}\n"
+ "{[Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long]}\n"
+ "Row #0: $984.45\n"
+ "Row #0: $3,426.54\n"
+ "Row #0: $3,610.14\n");
// Test that we can execute Ancestors by passing an integer as
// the depth argument (non PC hierarchy)
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Store].[USA].[CA].[Los Angeles], 2)'\n"
+ "select\n"
+ " [*ancestors] on columns\n"
+ "from [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[CA]}\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 74,748\n"
+ "Row #0: 266,773\n");
// Test that we can count the number of ancestors.
assertQueryReturns(
"with\n"
+ "set [*ancestors] as\n"
+ " 'Ancestors([Employees].[All Employees].[Sheri Nowmer].[Derrick Whelply].[Laurie Borges].[Eric Long].[Adam Reynolds].[Joshua Huff].[Teanna Cobb], [Employees].[All Employees].Level)'\n"
+ "member [Measures].[Depth] as\n"
+ " 'Count([*ancestors])'\n"
+ "select\n"
+ " [Measures].[Depth] on columns\n"
+ "from [HR]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Depth]}\n"
+ "Row #0: 7\n");
// test depth argument not a level
assertAxisThrows(
"Ancestors([Store].[USA].[CA].[Los Angeles],[Store])",
"Error while executing query");
}
public void testOrdinal() {
final TestContext testContext =
getTestContext().withCube("Sales Ragged");
Cell cell =
testContext.executeExprRaw(
"[Store].[All Stores].[Vatican].ordinal");
assertEquals(
"Vatican is at level 1.",
1,
((Number)cell.getValue()).intValue());
cell = testContext.executeExprRaw(
"[Store].[All Stores].[USA].[Washington].ordinal");
assertEquals(
"Washington is at level 3.",
3,
((Number) cell.getValue()).intValue());
}
public void testClosingPeriodNoArgs() {
getTestContext().assertMemberExprDependsOn(
"ClosingPeriod()", "{[Time]}");
// MSOLAP returns [1997].[Q4], because [Time].CurrentMember =
// [1997].
Member member = executeSingletonAxis("ClosingPeriod()");
Assert.assertEquals("[Time].[1997].[Q4]", member.getUniqueName());
}
public void testClosingPeriodLevel() {
getTestContext().assertMemberExprDependsOn(
"ClosingPeriod([Time].[Year])", "{[Time]}");
getTestContext().assertMemberExprDependsOn(
"([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))",
"{[Time]}");
Member member;
member = executeSingletonAxis("ClosingPeriod([Year])");
Assert.assertEquals("[Time].[1997]", member.getUniqueName());
member = executeSingletonAxis("ClosingPeriod([Quarter])");
Assert.assertEquals("[Time].[1997].[Q4]", member.getUniqueName());
member = executeSingletonAxis("ClosingPeriod([Month])");
Assert.assertEquals("[Time].[1997].[Q4].[12]", member.getUniqueName());
assertQueryReturns(
"with member [Measures].[Closing Unit Sales] as "
+ "'([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))'\n"
+ "select non empty {[Measures].[Closing Unit Sales]} on columns,\n"
+ " {Descendants([Time].[1997])} on rows\n"
+ "from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Closing Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997]}\n"
+ "{[Time].[1997].[Q1]}\n"
+ "{[Time].[1997].[Q1].[1]}\n"
+ "{[Time].[1997].[Q1].[2]}\n"
+ "{[Time].[1997].[Q1].[3]}\n"
+ "{[Time].[1997].[Q2]}\n"
+ "{[Time].[1997].[Q2].[4]}\n"
+ "{[Time].[1997].[Q2].[5]}\n"
+ "{[Time].[1997].[Q2].[6]}\n"
+ "{[Time].[1997].[Q3]}\n"
+ "{[Time].[1997].[Q3].[7]}\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "{[Time].[1997].[Q3].[9]}\n"
+ "{[Time].[1997].[Q4]}\n"
+ "{[Time].[1997].[Q4].[10]}\n"
+ "{[Time].[1997].[Q4].[11]}\n"
+ "{[Time].[1997].[Q4].[12]}\n"
+ "Row #0: 26,796\n"
+ "Row #1: 23,706\n"
+ "Row #2: 21,628\n"
+ "Row #3: 20,957\n"
+ "Row #4: 23,706\n"
+ "Row #5: 21,350\n"
+ "Row #6: 20,179\n"
+ "Row #7: 21,081\n"
+ "Row #8: 21,350\n"
+ "Row #9: 20,388\n"
+ "Row #10: 23,763\n"
+ "Row #11: 21,697\n"
+ "Row #12: 20,388\n"
+ "Row #13: 26,796\n"
+ "Row #14: 19,958\n"
+ "Row #15: 25,270\n"
+ "Row #16: 26,796\n");
assertQueryReturns(
"with member [Measures].[Closing Unit Sales] as '([Measures].[Unit Sales], ClosingPeriod([Time].[Month]))'\n"
+ "select {[Measures].[Unit Sales], [Measures].[Closing Unit Sales]} on columns,\n"
+ " {[Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q1].[1], [Time].[1997].[Q1].[3], [Time].[1997].[Q4].[12]} on rows\n"
+ "from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Closing Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997]}\n"
+ "{[Time].[1997].[Q1]}\n"
+ "{[Time].[1997].[Q1].[1]}\n"
+ "{[Time].[1997].[Q1].[3]}\n"
+ "{[Time].[1997].[Q4].[12]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: 26,796\n"
+ "Row #1: 66,291\n"
+ "Row #1: 23,706\n"
+ "Row #2: 21,628\n"
+ "Row #2: 21,628\n"
+ "Row #3: 23,706\n"
+ "Row #3: 23,706\n"
+ "Row #4: 26,796\n"
+ "Row #4: 26,796\n");
}
public void testClosingPeriodLevelNotInTimeFails() {
assertAxisThrows(
"ClosingPeriod([Store].[Store City])",
"The <level> and <member> arguments to ClosingPeriod must be from "
+ "the same hierarchy. The level was from '[Store]' but the member "
+ "was from '[Time]'");
}
public void testClosingPeriodMember() {
if (false) {
// This test is mistaken. Valid forms are ClosingPeriod(<level>)
// and ClosingPeriod(<level>, <member>), but not
// ClosingPeriod(<member>)
Member member = executeSingletonAxis("ClosingPeriod([USA])");
Assert.assertEquals("WA", member.getName());
}
}
    /**
     * Tests ClosingPeriod applied below a leaf member: the first branch is
     * deliberately disabled (invalid form), the second checks that
     * ClosingPeriod() on a month-level member yields the null member.
     */
    public void testClosingPeriodMemberLeaf() {
        Member member;
        if (false) {
            // This test is mistaken. Valid forms are ClosingPeriod(<level>)
            // and ClosingPeriod(<level>, <member>), but not
            // ClosingPeriod(<member>)
            member = executeSingletonAxis(
                "ClosingPeriod([Time].[1997].[Q3].[8])");
            Assert.assertNull(member);
        } else if (isDefaultNullMemberRepresentation()) {
            // Only run when the null member renders as "#null" (default).
            assertQueryReturns(
                "with member [Measures].[Foo] as ClosingPeriod().uniquename\n"
                + "select {[Measures].[Foo]} on columns,\n"
                + " {[Time].[1997],\n"
                + " [Time].[1997].[Q2],\n"
                + " [Time].[1997].[Q2].[4]} on rows\n"
                + "from Sales",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Foo]}\n"
                + "Axis #2:\n"
                + "{[Time].[1997]}\n"
                + "{[Time].[1997].[Q2]}\n"
                + "{[Time].[1997].[Q2].[4]}\n"
                + "Row #0: [Time].[1997].[Q4]\n"
                + "Row #1: [Time].[1997].[Q2].[6]\n"
                + "Row #2: [Time].[#null]\n"
                // MSAS returns "" here.
                + "");
        }
    }
    /**
     * Exercises ClosingPeriod over many combinations: dependency analysis,
     * Time and non-Time hierarchies, the 'all' level, ragged hierarchies,
     * default arguments, and hierarchy-mismatch errors.
     */
    public void testClosingPeriod() {
        // ClosingPeriod with CurrentMember depends on the [Time] hierarchy.
        getTestContext().assertMemberExprDependsOn(
            "ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)",
            "{[Time]}");

        String s1 = TestContext.allHiersExcept("[Measures]");
        getTestContext().assertExprDependsOn(
            "(([Measures].[Store Sales],"
            + " ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)) - "
            + "([Measures].[Store Cost],"
            + " ClosingPeriod([Time].[Month], [Time].[Time].CurrentMember)))",
            s1);

        // With an explicit member argument there is no context dependency.
        getTestContext().assertMemberExprDependsOn(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3])", "{}");

        // Level above the member's level: empty result.
        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997].[Q3])", "");

        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997].[Q3])",
            "[Time].[1997].[Q3]");

        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3])",
            "[Time].[1997].[Q3].[9]");

        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997])",
            "[Time].[1997].[Q4]");

        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997])", "[Time].[1997]");

        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997])",
            "[Time].[1997].[Q4].[12]");

        // leaf member
        assertAxisReturns(
            "ClosingPeriod([Time].[Year], [Time].[1997].[Q3].[8])", "");

        assertAxisReturns(
            "ClosingPeriod([Time].[Quarter], [Time].[1997].[Q3].[8])", "");

        assertAxisReturns(
            "ClosingPeriod([Time].[Month], [Time].[1997].[Q3].[8])",
            "[Time].[1997].[Q3].[8]");

        // non-Time dimension
        assertAxisReturns(
            "ClosingPeriod([Product].[Product Name], [Product].[All Products].[Drink])",
            "[Product].[Drink].[Dairy].[Dairy].[Milk].[Gorilla].[Gorilla Whole Milk]");

        assertAxisReturns(
            "ClosingPeriod([Product].[Product Family], [Product].[All Products].[Drink])",
            "[Product].[Drink]");

        // 'all' level
        assertAxisReturns(
            "ClosingPeriod([Product].[(All)], [Product].[All Products].[Drink])",
            "");

        // ragged
        getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
            "ClosingPeriod([Store].[Store City], [Store].[All Stores].[Israel])",
            "[Store].[Israel].[Israel].[Tel Aviv]");

        // Default member is [Time].[1997].
        assertAxisReturns(
            "ClosingPeriod([Time].[Month])", "[Time].[1997].[Q4].[12]");

        assertAxisReturns("ClosingPeriod()", "[Time].[1997].[Q4]");

        TestContext testContext = getTestContext().withCube("[Sales Ragged]");
        // Ragged hierarchy: [Israel] has no [Store State] ancestor level.
        testContext.assertAxisReturns(
            "ClosingPeriod([Store].[Store State], [Store].[All Stores].[Israel])",
            "");

        testContext.assertAxisThrows(
            "ClosingPeriod([Time].[Year], [Store].[All Stores].[Israel])",
            "The <level> and <member> arguments to ClosingPeriod must be "
            + "from the same hierarchy. The level was from '[Time]' but "
            + "the member was from '[Store]'.");
    }
public void testClosingPeriodBelow() {
Member member = executeSingletonAxis(
"ClosingPeriod([Quarter],[1997].[Q3].[8])");
Assert.assertNull(member);
}
public void testCousin1() {
Member member = executeSingletonAxis("Cousin([1997].[Q4],[1998])");
Assert.assertEquals("[Time].[1998].[Q4]", member.getUniqueName());
}
public void testCousin2() {
Member member = executeSingletonAxis(
"Cousin([1997].[Q4].[12],[1998].[Q1])");
Assert.assertEquals("[Time].[1998].[Q1].[3]", member.getUniqueName());
}
public void testCousinOverrun() {
Member member = executeSingletonAxis(
"Cousin([Customers].[USA].[CA].[San Jose],"
+ " [Customers].[USA].[OR])");
// CA has more cities than OR
Assert.assertNull(member);
}
public void testCousinThreeDown() {
Member member =
executeSingletonAxis(
"Cousin([Customers].[USA].[CA].[Berkeley].[Barbara Combs],"
+ " [Customers].[Mexico])");
// Barbara Combs is the 6th child
// of the 4th child (Berkeley)
// of the 1st child (CA)
// of USA
// Annmarie Hill is the 6th child
// of the 4th child (Tixapan)
// of the 1st child (DF)
// of Mexico
Assert.assertEquals(
"[Customers].[Mexico].[DF].[Tixapan].[Annmarie Hill]",
member.getUniqueName());
}
public void testCousinSameLevel() {
Member member =
executeSingletonAxis("Cousin([Gender].[M], [Gender].[F])");
Assert.assertEquals("F", member.getName());
}
public void testCousinHigherLevel() {
Member member =
executeSingletonAxis("Cousin([Time].[1997], [Time].[1998].[Q1])");
Assert.assertNull(member);
}
public void testCousinWrongHierarchy() {
assertAxisThrows(
"Cousin([Time].[1997], [Gender].[M])",
MondrianResource.instance().CousinHierarchyMismatch.str(
"[Time].[1997]",
"[Gender].[M]"));
}
    /**
     * Tests the &lt;Member&gt;.Parent operator, including its context
     * dependencies and its behavior on the root and null members.
     */
    public void testParent() {
        // [Gender] alone is implicitly [Gender].CurrentMember, so it depends
        // on the [Gender] hierarchy context.
        getTestContext().assertMemberExprDependsOn(
            "[Gender].Parent",
            "{[Gender]}");
        // An explicit member has no context dependency.
        getTestContext().assertMemberExprDependsOn("[Gender].[M].Parent", "{}");
        assertAxisReturns(
            "{[Store].[USA].[CA].Parent}", "[Store].[USA]");
        // root member has null parent
        assertAxisReturns("{[Store].[All Stores].Parent}", "");
        // parent of null member is null
        assertAxisReturns("{[Store].[All Stores].Parent.Parent}", "");
    }
    /**
     * Tests .Parent (and Ascendants) on the parent-child [Employees]
     * hierarchy of the HR cube.
     */
    public void testParentPC() {
        final TestContext testContext = getTestContext().withCube("HR");
        testContext.assertAxisReturns(
            "[Employees].Parent",
            "");
        testContext.assertAxisReturns(
            "[Employees].[Sheri Nowmer].Parent",
            "[Employees].[All Employees]");
        testContext.assertAxisReturns(
            "[Employees].[Sheri Nowmer].[Derrick Whelply].Parent",
            "[Employees].[Sheri Nowmer]");
        testContext.assertAxisReturns(
            "[Employees].Members.Item(3)",
            "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]");
        testContext.assertAxisReturns(
            "[Employees].Members.Item(3).Parent",
            "[Employees].[Sheri Nowmer].[Derrick Whelply]");
        testContext.assertAxisReturns(
            "[Employees].AllMembers.Item(3).Parent",
            "[Employees].[Sheri Nowmer].[Derrick Whelply]");

        // Ascendants(<Member>) applied to parent-child hierarchy accessed via
        // <Level>.Members
        testContext.assertAxisReturns(
            "Ascendants([Employees].Members.Item(73))",
            "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy].[Bertha Jameson].[James Bailey]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy].[Bertha Jameson]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie].[Ralph Mccoy]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Jacqueline Wyllie]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]\n"
            + "[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
            + "[Employees].[Sheri Nowmer]\n"
            + "[Employees].[All Employees]");
    }
    /**
     * Tests &lt;Level&gt;.Members and &lt;Dimension&gt;.Members; unlike
     * AllMembers, calculated members are excluded from the result.
     */
    public void testMembers() {
        // <Level>.members
        assertAxisReturns(
            "{[Customers].[Country].Members}",
            "[Customers].[Canada]\n"
            + "[Customers].[Mexico]\n"
            + "[Customers].[USA]");

        // <Level>.members applied to 'all' level
        assertAxisReturns(
            "{[Customers].[(All)].Members}", "[Customers].[All Customers]");

        // <Level>.members applied to measures dimension
        // Note -- no cube-level calculated members are present
        assertAxisReturns(
            "{[Measures].[MeasuresLevel].Members}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");

        // <Dimension>.members applied to Measures
        assertAxisReturns(
            "{[Measures].Members}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");

        // <Dimension>.members applied to a query with calc measures
        // Again, no calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].members} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n");
        }

        // <Level>.members applied to a query with calc measures
        // Again, no calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].[Measures].members} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n");
        }
    }
    /**
     * Tests &lt;Hierarchy&gt;.Members on the [Time.Weekly] hierarchy,
     * sampling the head and tail of the member list.
     */
    public void testHierarchyMembers() {
        assertAxisReturns(
            "Head({[Time.Weekly].Members}, 10)",
            "[Time].[Weekly].[All Weeklys]\n"
            + "[Time].[Weekly].[1997]\n"
            + "[Time].[Weekly].[1997].[1]\n"
            + "[Time].[Weekly].[1997].[1].[15]\n"
            + "[Time].[Weekly].[1997].[1].[16]\n"
            + "[Time].[Weekly].[1997].[1].[17]\n"
            + "[Time].[Weekly].[1997].[1].[18]\n"
            + "[Time].[Weekly].[1997].[1].[19]\n"
            + "[Time].[Weekly].[1997].[1].[20]\n"
            + "[Time].[Weekly].[1997].[2]");
        assertAxisReturns(
            "Tail({[Time.Weekly].Members}, 5)",
            "[Time].[Weekly].[1998].[51].[5]\n"
            + "[Time].[Weekly].[1998].[51].[29]\n"
            + "[Time].[Weekly].[1998].[51].[30]\n"
            + "[Time].[Weekly].[1998].[52]\n"
            + "[Time].[Weekly].[1998].[52].[6]");
    }
    /**
     * Tests &lt;Level&gt;.AllMembers and &lt;Dimension&gt;.AllMembers; unlike
     * Members, schema- and query-level calculated members ARE included.
     */
    public void testAllMembers() {
        // <Level>.allmembers
        assertAxisReturns(
            "{[Customers].[Country].allmembers}",
            "[Customers].[Canada]\n"
            + "[Customers].[Mexico]\n"
            + "[Customers].[USA]");

        // <Level>.allmembers applied to 'all' level
        assertAxisReturns(
            "{[Customers].[(All)].allmembers}", "[Customers].[All Customers]");

        // <Level>.allmembers applied to measures dimension
        // Note -- cube-level calculated members ARE present
        assertAxisReturns(
            "{[Measures].[MeasuresLevel].allmembers}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]\n"
            + "[Measures].[Profit]\n"
            + "[Measures].[Profit Growth]\n"
            + "[Measures].[Profit last Period]");

        // <Dimension>.allmembers applied to Measures
        assertAxisReturns(
            "{[Measures].allmembers}",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]\n"
            + "[Measures].[Profit]\n"
            + "[Measures].[Profit Growth]\n"
            + "[Measures].[Profit last Period]");

        // <Dimension>.allmembers applied to a query with calc measures
        // Calc measures are returned
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "with member [Measures].[Xxx] AS ' [Measures].[Unit Sales] '"
                + "select {[Measures].allmembers} on columns from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Xxx]}\n"
                + "Row #0: 266,773\n"
                + "Row #0: 225,627.23\n"
                + "Row #0: 565,238.13\n"
                + "Row #0: 86,837\n"
                + "Row #0: 5,581\n"
                + "Row #0: 151,211.21\n"
                + "Row #0: $339,610.90\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $339,610.90\n"
                + "Row #0: 266,773\n");
        }

        // Calc measure members from schema and from query
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as\n"
                + " '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].AllMembers} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Unit to Sales ratio]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 46.7%\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 0.0%\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 48.0%\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 0.0%\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 47.6%\n");
        }

        // Calc member in query and schema not seen
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].AllMembers} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "{[Measures].[Profit]}\n"
                + "{[Measures].[Profit Growth]}\n"
                + "{[Measures].[Profit last Period]}\n"
                + "{[Measures].[Unit to Sales ratio]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 0.0%\n"
                + "Row #0: $21,744.11\n"
                + "Row #0: 46.7%\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 0.0%\n"
                + "Row #1: $24,089.22\n"
                + "Row #1: 48.0%\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 0.0%\n"
                + "Row #2: $38,042.78\n"
                + "Row #2: 47.6%\n");
        }

        // Calc member in query and schema not seen
        switch (TestContext.instance().getDialect().getDatabaseProduct()) {
        case INFOBRIGHT:
            // Skip this test on Infobright, because [Promotion Sales] is
            // defined wrong.
            break;
        default:
            assertQueryReturns(
                "WITH MEMBER [Measures].[Unit to Sales ratio] as '[Measures].[Unit Sales] / [Measures].[Store Sales]', FORMAT_STRING='0.0%' "
                + "SELECT {[Measures].Members} ON COLUMNS,"
                + "non empty({[Store].[Store State].Members}) ON ROWS "
                + "FROM Sales "
                + "WHERE ([1997].[Q1])",
                "Axis #0:\n"
                + "{[Time].[1997].[Q1]}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Cost]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "{[Measures].[Sales Count]}\n"
                + "{[Measures].[Customer Count]}\n"
                + "{[Measures].[Promotion Sales]}\n"
                + "Axis #2:\n"
                + "{[Store].[USA].[CA]}\n"
                + "{[Store].[USA].[OR]}\n"
                + "{[Store].[USA].[WA]}\n"
                + "Row #0: 16,890\n"
                + "Row #0: 14,431.09\n"
                + "Row #0: 36,175.20\n"
                + "Row #0: 5,498\n"
                + "Row #0: 1,110\n"
                + "Row #0: 14,447.16\n"
                + "Row #1: 19,287\n"
                + "Row #1: 16,081.07\n"
                + "Row #1: 40,170.29\n"
                + "Row #1: 6,184\n"
                + "Row #1: 767\n"
                + "Row #1: 10,829.64\n"
                + "Row #2: 30,114\n"
                + "Row #2: 25,240.08\n"
                + "Row #2: 63,282.86\n"
                + "Row #2: 9,906\n"
                + "Row #2: 1,104\n"
                + "Row #2: 18,459.60\n");
        }

        // Calc member in dimension based on level
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "non empty({[Store].[Store State].AllMembers}) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n");

        // Calc member in dimension based on level not seen
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "non empty({[Store].[Store Country].AllMembers}) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 66,291\n"
            + "Row #0: 139,628.35\n");
    }
    /**
     * Tests AddCalculatedMembers: calculated members (both query- and
     * schema-defined) are appended to a set of stored members; the set must
     * contain members of a single dimension.
     */
    public void testAddCalculatedMembers() {
        //----------------------------------------------------
        // AddCalculatedMembers: Calc member in dimension based on level
        // included
        //----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,"
            + "AddCalculatedMembers([Store].[USA].Children) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n");
        //----------------------------------------------------
        //Calc member in dimension based on level included
        //Calc members in measures in schema included
        //----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT AddCalculatedMembers({[Measures].[Unit Sales], [Measures].[Store Sales]}) ON COLUMNS,"
            + "AddCalculatedMembers([Store].[USA].Children) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Profit]}\n"
            + "{[Measures].[Profit last Period]}\n"
            + "{[Measures].[Profit Growth]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #0: $21,744.11\n"
            + "Row #0: $21,744.11\n"
            + "Row #0: 0.0%\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #1: $24,089.22\n"
            + "Row #1: $24,089.22\n"
            + "Row #1: 0.0%\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n"
            + "Row #2: $38,042.78\n"
            + "Row #2: $38,042.78\n"
            + "Row #2: 0.0%\n"
            + "Row #3: 36,177\n"
            + "Row #3: 76,345.49\n"
            + "Row #3: $45,833.33\n"
            + "Row #3: $45,833.33\n"
            + "Row #3: 0.0%\n");
        //----------------------------------------------------
        //Two dimensions
        //----------------------------------------------------
        assertQueryReturns(
            "SELECT AddCalculatedMembers({[Measures].[Unit Sales], [Measures].[Store Sales]}) ON COLUMNS,"
            + "{([Store].[USA].[CA], [Gender].[F])} ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Profit]}\n"
            + "{[Measures].[Profit last Period]}\n"
            + "{[Measures].[Profit Growth]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "Row #0: 8,218\n"
            + "Row #0: 17,928.37\n"
            + "Row #0: $10,771.98\n"
            + "Row #0: $10,771.98\n"
            + "Row #0: 0.0%\n");
        //----------------------------------------------------
        //Should throw more than one dimension error
        //----------------------------------------------------
        assertAxisThrows(
            "AddCalculatedMembers({([Store].[USA].[CA], [Gender].[F])})",
            "Only single dimension members allowed in set for "
            + "AddCalculatedMembers");
    }
    /**
     * Tests StripCalculatedMembers: calculated members are removed from a
     * set, stored members are left alone, and the expression depends on the
     * set's hierarchy context.
     */
    public void testStripCalculatedMembers() {
        assertAxisReturns(
            "StripCalculatedMembers({[Measures].AllMembers})",
            "[Measures].[Unit Sales]\n"
            + "[Measures].[Store Cost]\n"
            + "[Measures].[Store Sales]\n"
            + "[Measures].[Sales Count]\n"
            + "[Measures].[Customer Count]\n"
            + "[Measures].[Promotion Sales]");

        // applied to empty set
        assertAxisReturns("StripCalculatedMembers({[Gender].Parent})", "");

        getTestContext().assertSetExprDependsOn(
            "StripCalculatedMembers([Customers].CurrentMember.Children)",
            "{[Customers]}");

        //----------------------------------------------------
        //Calc members in dimension based on level stripped
        //Actual members in measures left alone
        //----------------------------------------------------
        assertQueryReturns(
            "WITH MEMBER [Store].[USA].[CA plus OR] AS "
            + "'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})' "
            + "SELECT StripCalculatedMembers({[Measures].[Unit Sales], "
            + "[Measures].[Store Sales]}) ON COLUMNS,"
            + "StripCalculatedMembers("
            + "AddCalculatedMembers([Store].[USA].Children)) ON ROWS "
            + "FROM Sales "
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            + "Row #2: 30,114\n"
            + "Row #2: 63,282.86\n");
    }
    /**
     * Tests .CurrentMember applied to a dimension, a hierarchy, and (as a
     * Mondrian extension) a level.
     */
    public void testCurrentMember() {
        // <Dimension>.CurrentMember
        assertAxisReturns("[Gender].CurrentMember", "[Gender].[All Gender]");

        // <Hierarchy>.CurrentMember
        assertAxisReturns(
            "[Gender].Hierarchy.CurrentMember", "[Gender].[All Gender]");

        // <Level>.CurrentMember
        // MSAS doesn't allow this, but Mondrian does: it implicitly casts
        // level to hierarchy.
        assertAxisReturns("[Store Name].CurrentMember", "[Store].[All Stores]");
    }
    /**
     * Tests which hierarchies a CurrentMember expression (explicit or
     * implicit) depends on for evaluation.
     */
    public void testCurrentMemberDepends() {
        getTestContext().assertMemberExprDependsOn(
            "[Gender].CurrentMember",
            "{[Gender]}");

        // A fixed member's dimension name needs no context at all.
        getTestContext().assertExprDependsOn(
            "[Gender].[M].Dimension.Name", "{}");
        // implicit call to .CurrentMember when dimension is used as a member
        // expression
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension",
            "{[Gender]}");
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension.CurrentMember", "{[Gender]}");
        getTestContext().assertMemberExprDependsOn(
            "[Gender].[M].Dimension.CurrentMember.Parent", "{[Gender]}");

        // [Customers] is short for [Customers].CurrentMember, so
        // depends upon everything
        getTestContext().assertExprDependsOn(
            "[Customers]", TestContext.allHiers());
    }
public void testCurrentMemberFromSlicer() {
Result result = executeQuery(
"with member [Measures].[Foo] as '[Gender].CurrentMember.Name'\n"
+ "select {[Measures].[Foo]} on columns\n"
+ "from Sales where ([Gender].[F])");
Assert.assertEquals("F", result.getCell(new int[]{0}).getValue());
}
public void testCurrentMemberFromDefaultMember() {
Result result = executeQuery(
"with member [Measures].[Foo] as"
+ " '[Time].[Time].CurrentMember.Name'\n"
+ "select {[Measures].[Foo]} on columns\n"
+ "from Sales");
Assert.assertEquals("1997", result.getCell(new int[]{0}).getValue());
}
    /**
     * Tests CurrentMember when the Time dimension has multiple hierarchies:
     * context may be set via the slicer or via a rows-axis member of either
     * [Time] or [Time.Weekly]. The expected hierarchy name depends on the
     * SsasCompatibleNaming property.
     */
    public void testCurrentMemberMultiHierarchy() {
        final String hierarchyName =
            MondrianProperties.instance().SsasCompatibleNaming.get()
                ? "Weekly"
                : "Time.Weekly";
        final String queryString =
            "with member [Measures].[Foo] as\n"
            + " 'IIf(([Time].[Time].CurrentMember.Hierarchy.Name = \""
            + hierarchyName
            + "\"), \n"
            + "[Measures].[Unit Sales], \n"
            + "- [Measures].[Unit Sales])'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} ON COLUMNS,\n"
            + " {[Product].[Food].[Dairy]} ON ROWS\n"
            + "from [Sales]";
        Result result =
            executeQuery(
                queryString + " where [Time].[1997]");
        final int[] coords = {1, 0};
        // [Time] context: IIf condition is false, so the value is negated.
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());

        // As above, but context provided on rows axis as opposed to slicer.
        final String queryString1 =
            "with member [Measures].[Foo] as\n"
            + " 'IIf(([Time].[Time].CurrentMember.Hierarchy.Name = \""
            + hierarchyName
            + "\"), \n"
            + "[Measures].[Unit Sales], \n"
            + "- [Measures].[Unit Sales])'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} ON COLUMNS,";

        final String queryString2 =
            "from [Sales]\n"
            + " where [Product].[Food].[Dairy] ";

        result =
            executeQuery(
                queryString1 + " {[Time].[1997]} ON ROWS " + queryString2);
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());

        result =
            executeQuery(
                queryString + " where [Time.Weekly].[1997]");
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());

        result =
            executeQuery(
                queryString1 + " {[Time.Weekly].[1997]} ON ROWS "
                + queryString2);
        Assert.assertEquals(
            "-12,885",
            result.getCell(coords).getFormattedValue());
    }
    /**
     * Tests &lt;Hierarchy&gt;.DefaultMember: with no 'all' member the first
     * member of the first level is used; with an 'all' member it is the
     * default; and an explicit defaultMember schema attribute overrides both.
     */
    public void testDefaultMember() {
        // [Time] has no default member and no all, so the default member is
        // the first member of the first level.
        Result result =
            executeQuery(
                "select {[Time].[Time].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            "1997",
            result.getAxes()[0].getPositions().get(0).get(0).getName());

        // [Time].[Weekly] has an all member and no explicit default.
        result =
            executeQuery(
                "select {[Time.Weekly].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            MondrianProperties.instance().SsasCompatibleNaming.get()
                ? "All Weeklys"
                : "All Time.Weeklys",
            result.getAxes()[0].getPositions().get(0).get(0).getName());

        final String memberUname =
            MondrianProperties.instance().SsasCompatibleNaming.get()
                ? "[Time2].[Weekly].[1997].[23]"
                : "[Time2.Weekly].[1997].[23]";
        TestContext testContext = TestContext.instance().createSubstitutingCube(
            "Sales",
            "  <Dimension name=\"Time2\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
            + "    <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
            + "      <Table name=\"time_by_day\"/>\n"
            + "      <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
            + "          levelType=\"TimeYears\"/>\n"
            + "      <Level name=\"Quarter\" column=\"quarter\" uniqueMembers=\"false\"\n"
            + "          levelType=\"TimeQuarters\"/>\n"
            + "      <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + "          levelType=\"TimeMonths\"/>\n"
            + "    </Hierarchy>\n"
            + "    <Hierarchy hasAll=\"true\" name=\"Weekly\" primaryKey=\"time_id\"\n"
            + "        defaultMember=\""
            + memberUname
            + "\">\n"
            + "      <Table name=\"time_by_day\"/>\n"
            + "      <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
            + "          levelType=\"TimeYears\"/>\n"
            + "      <Level name=\"Week\" column=\"week_of_year\" type=\"Numeric\" uniqueMembers=\"false\"\n"
            + "          levelType=\"TimeWeeks\"/>\n"
            + "      <Level name=\"Day\" column=\"day_of_month\" uniqueMembers=\"false\" type=\"Numeric\"\n"
            + "          levelType=\"TimeDays\"/>\n"
            + "    </Hierarchy>\n"
            + "  </Dimension>");

        // In this variant of the schema, Time2.Weekly has an explicit default
        // member.
        result =
            testContext.executeQuery(
                "select {[Time2.Weekly].DefaultMember} on columns\n"
                + "from Sales");
        Assert.assertEquals(
            "23",
            result.getAxes()[0].getPositions().get(0).get(0).getName());
    }
public void testCurrentMemberFromAxis() {
Result result = executeQuery(
"with member [Measures].[Foo] as"
+ " '[Gender].CurrentMember.Name"
+ " || [Marital Status].CurrentMember.Name'\n"
+ "select {[Measures].[Foo]} on columns,\n"
+ " CrossJoin({[Gender].children},"
+ " {[Marital Status].children}) on rows\n"
+ "from Sales");
Assert.assertEquals("FM", result.getCell(new int[]{0, 0}).getValue());
}
    /**
     * When evaluating a calculated member, MSOLAP regards that
     * calculated member as the current member of that dimension, so it
     * cycles in this case. But I disagree; it is the previous current
     * member, before the calculated member was expanded.
     */
    public void testCurrentMemberInCalcMember() {
        Result result = executeQuery(
            "with member [Measures].[Foo] as '[Measures].CurrentMember.Name'\n"
            + "select {[Measures].[Foo]} on columns\n"
            + "from Sales");
        // Mondrian yields the pre-expansion current member, not [Foo] itself.
        Assert.assertEquals(
            "Unit Sales", result.getCell(new int[]{0}).getValue());
    }
    /**
     * Tests NamedSet.CurrentOrdinal combined with the Order function.
     * Ordering by descending CurrentOrdinal reverses the named set.
     */
    public void testNamedSetCurrentOrdinalWithOrder() {
        // The <Named Set>.CurrentOrdinal only works correctly when named sets
        // are evaluated as iterables, and JDK 1.4 only supports lists.
        if (Util.Retrowoven) {
            return;
        }
        assertQueryReturns(
            "with set [Time Regular] as [Time].[Time].Members\n"
            + " set [Time Reversed] as"
            + " Order([Time Regular], [Time Regular].CurrentOrdinal, BDESC)\n"
            + "select [Time Reversed] on 0\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1998].[Q4].[12]}\n"
            + "{[Time].[1998].[Q4].[11]}\n"
            + "{[Time].[1998].[Q4].[10]}\n"
            + "{[Time].[1998].[Q4]}\n"
            + "{[Time].[1998].[Q3].[9]}\n"
            + "{[Time].[1998].[Q3].[8]}\n"
            + "{[Time].[1998].[Q3].[7]}\n"
            + "{[Time].[1998].[Q3]}\n"
            + "{[Time].[1998].[Q2].[6]}\n"
            + "{[Time].[1998].[Q2].[5]}\n"
            + "{[Time].[1998].[Q2].[4]}\n"
            + "{[Time].[1998].[Q2]}\n"
            + "{[Time].[1998].[Q1].[3]}\n"
            + "{[Time].[1998].[Q1].[2]}\n"
            + "{[Time].[1998].[Q1].[1]}\n"
            + "{[Time].[1998].[Q1]}\n"
            + "{[Time].[1998]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "{[Time].[1997].[Q4].[11]}\n"
            + "{[Time].[1997].[Q4].[10]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "{[Time].[1997].[Q3].[9]}\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q2].[5]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997]}\n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: 26,796\n"
            + "Row #0: 25,270\n"
            + "Row #0: 19,958\n"
            + "Row #0: 72,024\n"
            + "Row #0: 20,388\n"
            + "Row #0: 21,697\n"
            + "Row #0: 23,763\n"
            + "Row #0: 65,848\n"
            + "Row #0: 21,350\n"
            + "Row #0: 21,081\n"
            + "Row #0: 20,179\n"
            + "Row #0: 62,610\n"
            + "Row #0: 23,706\n"
            + "Row #0: 20,957\n"
            + "Row #0: 21,628\n"
            + "Row #0: 66,291\n"
            + "Row #0: 266,773\n");
    }
    /**
     * Tests NamedSet.CurrentOrdinal combined with the Generate function.
     * Selecting Item(CurrentOrdinal * 2) yields every other member.
     */
    public void testNamedSetCurrentOrdinalWithGenerate() {
        // The <Named Set>.CurrentOrdinal only works correctly when named sets
        // are evaluated as iterables, and JDK 1.4 only supports lists.
        if (Util.Retrowoven) {
            return;
        }
        assertQueryReturns(
            " with set [Time Regular] as [Time].[Time].Members\n"
            + "set [Every Other Time] as\n"
            + "  Generate(\n"
            + "    [Time Regular],\n"
            + "    {[Time].[Time].Members.Item(\n"
            + "      [Time Regular].CurrentOrdinal * 2)})\n"
            + "select [Every Other Time] on 0\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "{[Time].[1997].[Q3].[9]}\n"
            + "{[Time].[1997].[Q4].[10]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "{[Time].[1998].[Q1]}\n"
            + "{[Time].[1998].[Q1].[2]}\n"
            + "{[Time].[1998].[Q2]}\n"
            + "{[Time].[1998].[Q2].[5]}\n"
            + "{[Time].[1998].[Q3]}\n"
            + "{[Time].[1998].[Q3].[8]}\n"
            + "{[Time].[1998].[Q4]}\n"
            + "{[Time].[1998].[Q4].[11]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 21,628\n"
            + "Row #0: 23,706\n"
            + "Row #0: 20,179\n"
            + "Row #0: 21,350\n"
            + "Row #0: 23,763\n"
            + "Row #0: 20,388\n"
            + "Row #0: 19,958\n"
            + "Row #0: 26,796\n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n");
    }
    /**
     * Tests NamedSet.CurrentOrdinal combined with the Filter function:
     * keeps only the members at iteration ordinals 3 and 5.
     */
    public void testNamedSetCurrentOrdinalWithFilter() {
        // The <Named Set>.CurrentOrdinal only works correctly when named sets
        // are evaluated as iterables, and JDK 1.4 only supports lists.
        if (Util.Retrowoven) {
            return;
        }
        assertQueryReturns(
            "with set [Time Regular] as [Time].[Time].Members\n"
            + " set [Time Subset] as "
            + "   Filter([Time Regular], [Time Regular].CurrentOrdinal = 3"
            + " or [Time Regular].CurrentOrdinal = 5)\n"
            + "select [Time Subset] on 0\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "Row #0: 20,957\n"
            + "Row #0: 62,610\n");
    }
    /** TODO: exercise NamedSet.CurrentOrdinal inside a Crossjoin. */
    public void testNamedSetCurrentOrdinalWithCrossjoin() {
        // TODO: not yet implemented
    }
    /**
     * Verifies that CurrentOrdinal and Current may only be applied to a bare
     * named-set reference; wrapping the set in {...} or using an arbitrary
     * set expression raises "Not a named set".
     */
    public void testNamedSetCurrentOrdinalWithNonNamedSetFails() {
        // a named set wrapped in {...} is not a named set, so CurrentOrdinal
        // fails
        assertQueryThrows(
            "with set [Time Members] as [Time].Members\n"
            + "member [Measures].[Foo] as ' {[Time Members]}.CurrentOrdinal '\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
            + " {[Product].Children} on 1\n"
            + "from [Sales]",
            "Not a named set");

        // as above for Current function
        assertQueryThrows(
            "with set [Time Members] as [Time].Members\n"
            + "member [Measures].[Foo] as ' {[Time Members]}.Current.Name '\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
            + " {[Product].Children} on 1\n"
            + "from [Sales]",
            "Not a named set");

        // a set expression is not a named set, so CurrentOrdinal fails
        assertQueryThrows(
            "with member [Measures].[Foo] as\n"
            + " ' Head([Time].Members, 5).CurrentOrdinal '\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
            + " {[Product].Children} on 1\n"
            + "from [Sales]",
            "Not a named set");

        // as above for Current function
        assertQueryThrows(
            "with member [Measures].[Foo] as\n"
            + " ' Crossjoin([Time].Members, [Gender].Members).Current.Name '\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} on 0,\n"
            + " {[Product].Children} on 1\n"
            + "from [Sales]",
            "Not a named set");
    }
public void testDimensionDefaultMember() {
Member member = executeSingletonAxis("[Measures].DefaultMember");
Assert.assertEquals("Unit Sales", member.getName());
}
    /**
     * Tests the DrilldownLevel function, with and without an explicit level
     * argument, and with the index form for sets of tuples.
     */
    public void testDrilldownLevel() {
        // Expect all children of USA
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA]}, [Store].[Store Country])",
            "[Store].[USA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
        // Expect same set, because [USA] is already drilled
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA], [Store].[USA].[CA]}, [Store].[Store Country])",
            "[Store].[USA]\n"
            + "[Store].[USA].[CA]");
        // Expect drill, because [USA] isn't already drilled. You can't
        // drill down on [CA] and get to [USA]
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]}, [Store].[Store Country])",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
        // Missing level argument plus index 0: drill every member of the set
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]},, 0)",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
        // Tuple set: index 0 drills the [Store] component of each tuple
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]} * {[Gender].Members},, 0)",
            "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA].[Alameda], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "{[Store].[USA].[CA].[Alameda], [Gender].[F]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[F]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Gender].[F]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Gender].[F]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Gender].[F]}\n"
            + "{[Store].[USA].[CA], [Gender].[M]}\n"
            + "{[Store].[USA].[CA].[Alameda], [Gender].[M]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Gender].[M]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Gender].[M]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Gender].[M]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Gender].[M]}\n"
            + "{[Store].[USA], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[OR], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[WA], [Gender].[All Gender]}\n"
            + "{[Store].[USA], [Gender].[F]}\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "{[Store].[USA].[OR], [Gender].[F]}\n"
            + "{[Store].[USA].[WA], [Gender].[F]}\n"
            + "{[Store].[USA], [Gender].[M]}\n"
            + "{[Store].[USA].[CA], [Gender].[M]}\n"
            + "{[Store].[USA].[OR], [Gender].[M]}\n"
            + "{[Store].[USA].[WA], [Gender].[M]}");
        // Index 1 drills the [Gender] component of each tuple
        assertAxisReturns(
            "DrilldownLevel({[Store].[USA].[CA],[Store].[USA]} * {[Gender].Members},, 1)",
            "{[Store].[USA].[CA], [Gender].[All Gender]}\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "{[Store].[USA].[CA], [Gender].[M]}\n"
            + "{[Store].[USA].[CA], [Gender].[F]}\n"
            + "{[Store].[USA].[CA], [Gender].[M]}\n"
            + "{[Store].[USA], [Gender].[All Gender]}\n"
            + "{[Store].[USA], [Gender].[F]}\n"
            + "{[Store].[USA], [Gender].[M]}\n"
            + "{[Store].[USA], [Gender].[F]}\n"
            + "{[Store].[USA], [Gender].[M]}");
    }
    /**
     * Tests the DrilldownLevelTop (and, briefly, DrilldownLevelBottom)
     * function: drilling each member down to its top/bottom <n> children,
     * including edge cases for negative, zero and null counts.
     */
    public void testDrilldownLevelTop() {
        // <set>, <n>, <level>
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, 2, [Store].[Store Country])",
            "[Store].[USA]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[CA]");
        // similarly DrilldownLevelBottom
        assertAxisReturns(
            "DrilldownLevelBottom({[Store].[USA]}, 2, [Store].[Store Country])",
            "[Store].[USA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[CA]");
        // <set>, <n>
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, 2)",
            "[Store].[USA]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[CA]");
        // <n> greater than number of children
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA], [Store].[Canada]}, 4)",
            "[Store].[USA]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[Canada]\n"
            + "[Store].[Canada].[BC]");
        // <n> negative
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, 2 - 3)",
            "[Store].[USA]");
        // <n> zero
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, 2 - 2)",
            "[Store].[USA]");
        // <n> null
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, null)",
            "[Store].[USA]");
        // mixed bag, no level, all expanded
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA], "
            + "[Store].[USA].[CA].[San Francisco], "
            + "[Store].[All Stores], "
            + "[Store].[Canada].[BC]}, "
            + "2)",
            "[Store].[USA]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[CA].[San Francisco].[Store 14]\n"
            + "[Store].[All Stores]\n"
            + "[Store].[USA]\n"
            + "[Store].[Canada]\n"
            + "[Store].[Canada].[BC]\n"
            + "[Store].[Canada].[BC].[Vancouver]\n"
            + "[Store].[Canada].[BC].[Victoria]");
        // mixed bag, only specified level expanded
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA], "
            + "[Store].[USA].[CA].[San Francisco], "
            + "[Store].[All Stores], "
            + "[Store].[Canada].[BC]}, 2, [Store].[Store City])",
            "[Store].[USA]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[CA].[San Francisco].[Store 14]\n"
            + "[Store].[All Stores]\n"
            + "[Store].[Canada].[BC]");
        // bad level
        assertAxisThrows(
            "DrilldownLevelTop({[Store].[USA]}, 2, [Customers].[Country])",
            "Level '[Customers].[Country]' not compatible with "
            + "member '[Store].[USA]'");
    }
    /**
     * Tests DrilldownLevelTop with an empty level argument but an explicit
     * ranking expression (forwards and negated/reversed).
     */
    public void testDrilldownMemberEmptyExpr() {
        // no level, with expression
        assertAxisReturns(
            "DrilldownLevelTop({[Store].[USA]}, 2, , [Measures].[Unit Sales])",
            "[Store].[USA]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[CA]");
        // reverse expression
        assertAxisReturns(
            "DrilldownLevelTop("
            + "{[Store].[USA]}, 2, , - [Measures].[Unit Sales])",
            "[Store].[USA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[CA]");
    }
    /**
     * Tests the DrilldownMember function: drilling specific members of a set
     * (members, leaves, tuples), including the RECURSIVE option.
     */
    public void testDrilldownMember() {
        // Expect all children of USA
        assertAxisReturns(
            "DrilldownMember({[Store].[USA]}, {[Store].[USA]})",
            "[Store].[USA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
        // Expect all children of USA.CA and USA.OR
        assertAxisReturns(
            "DrilldownMember({[Store].[USA].[CA], [Store].[USA].[OR]}, "
            + "{[Store].[USA].[CA], [Store].[USA].[OR], [Store].[USA].[WA]})",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[OR].[Portland]\n"
            + "[Store].[USA].[OR].[Salem]");
        // Second set is empty
        assertAxisReturns(
            "DrilldownMember({[Store].[USA]}, {})",
            "[Store].[USA]");
        // Drill down a leaf member
        assertAxisReturns(
            "DrilldownMember({[Store].[All Stores].[USA].[CA].[San Francisco].[Store 14]}, "
            + "{[Store].[USA].[CA].[San Francisco].[Store 14]})",
            "[Store].[USA].[CA].[San Francisco].[Store 14]");
        // Complex case with option recursive
        assertAxisReturns(
            "DrilldownMember({[Store].[All Stores].[USA]}, "
            + "{[Store].[All Stores].[USA], [Store].[All Stores].[USA].[CA], "
            + "[Store].[All Stores].[USA].[CA].[San Diego], [Store].[All Stores].[USA].[WA]}, "
            + "RECURSIVE)",
            "[Store].[USA]\n"
            + "[Store].[USA].[CA]\n"
            + "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[San Diego].[Store 24]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]\n"
            + "[Store].[USA].[WA].[Bellingham]\n"
            + "[Store].[USA].[WA].[Bremerton]\n"
            + "[Store].[USA].[WA].[Seattle]\n"
            + "[Store].[USA].[WA].[Spokane]\n"
            + "[Store].[USA].[WA].[Tacoma]\n"
            + "[Store].[USA].[WA].[Walla Walla]\n"
            + "[Store].[USA].[WA].[Yakima]");
        // Sets of tuples
        assertAxisReturns(
            "DrilldownMember({([Store Type].[Supermarket], [Store].[USA])}, {[Store].[USA]})",
            "{[Store Type].[Supermarket], [Store].[USA]}\n"
            + "{[Store Type].[Supermarket], [Store].[USA].[CA]}\n"
            + "{[Store Type].[Supermarket], [Store].[USA].[OR]}\n"
            + "{[Store Type].[Supermarket], [Store].[USA].[WA]}");
    }
public void testFirstChildFirstInLevel() {
Member member = executeSingletonAxis("[Time].[1997].[Q4].FirstChild");
Assert.assertEquals("10", member.getName());
}
public void testFirstChildAll() {
Member member =
executeSingletonAxis("[Gender].[All Gender].FirstChild");
Assert.assertEquals("F", member.getName());
}
public void testFirstChildOfChildless() {
Member member =
executeSingletonAxis("[Gender].[All Gender].[F].FirstChild");
Assert.assertNull(member);
}
public void testFirstSiblingFirstInLevel() {
Member member = executeSingletonAxis("[Gender].[F].FirstSibling");
Assert.assertEquals("F", member.getName());
}
public void testFirstSiblingLastInLevel() {
Member member =
executeSingletonAxis("[Time].[1997].[Q4].FirstSibling");
Assert.assertEquals("Q1", member.getName());
}
public void testFirstSiblingAll() {
Member member =
executeSingletonAxis("[Gender].[All Gender].FirstSibling");
Assert.assertTrue(member.isAll());
}
public void testFirstSiblingRoot() {
// The [Measures] hierarchy does not have an 'all' member, so
// [Unit Sales] does not have a parent.
Member member =
executeSingletonAxis("[Measures].[Store Sales].FirstSibling");
Assert.assertEquals("Unit Sales", member.getName());
}
public void testFirstSiblingNull() {
Member member =
executeSingletonAxis("[Gender].[F].FirstChild.FirstSibling");
Assert.assertNull(member);
}
public void testLag() {
Member member = executeSingletonAxis("[Time].[1997].[Q4].[12].Lag(4)");
Assert.assertEquals("8", member.getName());
}
public void testLagFirstInLevel() {
Member member = executeSingletonAxis("[Gender].[F].Lag(1)");
Assert.assertNull(member);
}
public void testLagAll() {
Member member = executeSingletonAxis("[Gender].DefaultMember.Lag(2)");
Assert.assertNull(member);
}
public void testLagRoot() {
Member member = executeSingletonAxis("[Time].[1998].Lag(1)");
Assert.assertEquals("1997", member.getName());
}
public void testLagRootTooFar() {
Member member = executeSingletonAxis("[Time].[1998].Lag(2)");
Assert.assertNull(member);
}
public void testLastChild() {
Member member = executeSingletonAxis("[Gender].LastChild");
Assert.assertEquals("M", member.getName());
}
public void testLastChildLastInLevel() {
Member member = executeSingletonAxis("[Time].[1997].[Q4].LastChild");
Assert.assertEquals("12", member.getName());
}
public void testLastChildAll() {
Member member = executeSingletonAxis("[Gender].[All Gender].LastChild");
Assert.assertEquals("M", member.getName());
}
public void testLastChildOfChildless() {
Member member = executeSingletonAxis("[Gender].[M].LastChild");
Assert.assertNull(member);
}
public void testLastSibling() {
Member member = executeSingletonAxis("[Gender].[F].LastSibling");
Assert.assertEquals("M", member.getName());
}
public void testLastSiblingFirstInLevel() {
Member member = executeSingletonAxis("[Time].[1997].[Q1].LastSibling");
Assert.assertEquals("Q4", member.getName());
}
public void testLastSiblingAll() {
Member member =
executeSingletonAxis("[Gender].[All Gender].LastSibling");
Assert.assertTrue(member.isAll());
}
public void testLastSiblingRoot() {
// The [Time] hierarchy does not have an 'all' member, so
// [1997], [1998] do not have parents.
Member member = executeSingletonAxis("[Time].[1998].LastSibling");
Assert.assertEquals("1998", member.getName());
}
public void testLastSiblingNull() {
Member member =
executeSingletonAxis("[Gender].[F].FirstChild.LastSibling");
Assert.assertNull(member);
}
public void testLead() {
Member member = executeSingletonAxis("[Time].[1997].[Q2].[4].Lead(4)");
Assert.assertEquals("8", member.getName());
}
public void testLeadNegative() {
Member member = executeSingletonAxis("[Gender].[M].Lead(-1)");
Assert.assertEquals("F", member.getName());
}
public void testLeadLastInLevel() {
Member member = executeSingletonAxis("[Gender].[M].Lead(3)");
Assert.assertNull(member);
}
public void testLeadNull() {
Member member = executeSingletonAxis("[Gender].Parent.Lead(1)");
Assert.assertNull(member);
}
public void testLeadZero() {
Member member = executeSingletonAxis("[Gender].[F].Lead(0)");
Assert.assertEquals("F", member.getName());
}
public void testBasic2() {
Result result =
executeQuery(
"select {[Gender].[F].NextMember} ON COLUMNS from Sales");
assertEquals(
"M",
result.getAxes()[0].getPositions().get(0).get(0).getName());
}
public void testFirstInLevel2() {
Result result =
executeQuery(
"select {[Gender].[M].NextMember} ON COLUMNS from Sales");
assertEquals(0, result.getAxes()[0].getPositions().size());
}
public void testAll2() {
Result result =
executeQuery("select {[Gender].PrevMember} ON COLUMNS from Sales");
// previous to [Gender].[All] is null, so no members are returned
assertEquals(0, result.getAxes()[0].getPositions().size());
}
public void testBasic5() {
Result result =
executeQuery(
"select{ [Product].[All Products].[Drink].Parent} on columns "
+ "from Sales");
assertEquals(
"All Products",
result.getAxes()[0].getPositions().get(0).get(0).getName());
}
public void testFirstInLevel5() {
Result result =
executeQuery(
"select {[Time].[1997].[Q2].[4].Parent} on columns,"
+ "{[Gender].[M]} on rows from Sales");
assertEquals(
"Q2",
result.getAxes()[0].getPositions().get(0).get(0).getName());
}
    /** Parent of quarter [Q2] is the year [1997]. */
    public void testAll5() {
        Result result =
            executeQuery(
                "select {[Time].[1997].[Q2].Parent} on columns,"
                + "{[Gender].[M]} on rows from Sales");
        // [Q2]'s parent is [1997]. (The previous comment here, about
        // [Gender].[All] having no previous member, was copy-pasted from
        // another test and did not describe this assertion.)
        assertEquals(
            "1997",
            result.getAxes()[0].getPositions().get(0).get(0).getName());
    }
public void testBasic() {
Result result =
executeQuery(
"select {[Gender].[M].PrevMember} ON COLUMNS from Sales");
assertEquals(
"F",
result.getAxes()[0].getPositions().get(0).get(0).getName());
}
public void testFirstInLevel() {
Result result =
executeQuery(
"select {[Gender].[F].PrevMember} ON COLUMNS from Sales");
assertEquals(0, result.getAxes()[0].getPositions().size());
}
public void testAll() {
Result result =
executeQuery("select {[Gender].PrevMember} ON COLUMNS from Sales");
// previous to [Gender].[All] is null, so no members are returned
assertEquals(0, result.getAxes()[0].getPositions().size());
}
    /**
     * Checks which hierarchies various Aggregate expressions depend on
     * (i.e. whose current member can change the expression's value).
     */
    public void testAggregateDepends() {
        // Depends on everything except Measures, Gender
        String s12 = TestContext.allHiersExcept("[Measures]", "[Gender]");
        getTestContext().assertExprDependsOn(
            "([Measures].[Unit Sales], [Gender].[F])", s12);
        // Depends on everything except Customers and Gender.
        // NOTE(review): the original comment also listed Measures, but the
        // code below only excludes [Customers] and [Gender] -- confirm
        // whether the comment or the exclusion list was intended.
        String s13 = TestContext.allHiersExcept("[Customers]", "[Gender]");
        getTestContext().assertExprDependsOn(
            "Aggregate([Customers].Members, ([Measures].[Unit Sales], [Gender].[F]))",
            s13);
        // Depends on everything except Customers
        String s11 = TestContext.allHiersExcept("[Customers]");
        getTestContext().assertExprDependsOn(
            "Aggregate([Customers].Members)",
            s11);
        // Depends on the current member of the Product dimension, even though
        // [Product].[All Products] is referenced from the expression.
        String s1 = TestContext.allHiersExcept("[Customers]");
        getTestContext().assertExprDependsOn(
            "Aggregate(Filter([Customers].[City].Members, (([Measures].[Unit Sales] / ([Measures].[Unit Sales], [Product].[All Products])) > 0.1)))",
            s1);
    }
    /**
     * Tests the Aggregate function: a calculated member combining two
     * states sums their Unit Sales and Store Sales.
     */
    public void testAggregate() {
        assertQueryReturns(
            "WITH MEMBER [Store].[CA plus OR] AS 'AGGREGATE({[Store].[USA].[CA], [Store].[USA].[OR]})'\n"
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS,\n"
            + " {[Store].[USA].[CA], [Store].[USA].[OR], [Store].[CA plus OR]} ON ROWS\n"
            + "FROM Sales\n"
            + "WHERE ([1997].[Q1])",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[CA plus OR]}\n"
            + "Row #0: 16,890\n"
            + "Row #0: 36,175.20\n"
            + "Row #1: 19,287\n"
            + "Row #1: 40,170.29\n"
            // row 2 = row 0 + row 1, confirming the aggregate
            + "Row #2: 36,177\n"
            + "Row #2: 76,345.49\n");
    }
    /**
     * Tests Aggregate on calculated time members (half-year totals plus
     * their difference) against each store state.
     */
    public void testAggregate2() {
        assertQueryReturns(
            "WITH\n"
            + "  Member [Time].[Time].[1st Half Sales] AS 'Aggregate({Time.[1997].[Q1], Time.[1997].[Q2]})'\n"
            + "  Member [Time].[Time].[2nd Half Sales] AS 'Aggregate({Time.[1997].[Q3], Time.[1997].[Q4]})'\n"
            + "  Member [Time].[Time].[Difference] AS 'Time.[2nd Half Sales] - Time.[1st Half Sales]'\n"
            + "SELECT\n"
            + "   { [Store].[Store State].Members} ON COLUMNS,\n"
            + "   { Time.[1st Half Sales], Time.[2nd Half Sales], Time.[Difference]} ON ROWS\n"
            + "FROM Sales\n"
            + "WHERE [Measures].[Store Sales]",
            "Axis #0:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #1:\n"
            + "{[Store].[Canada].[BC]}\n"
            + "{[Store].[Mexico].[DF]}\n"
            + "{[Store].[Mexico].[Guerrero]}\n"
            + "{[Store].[Mexico].[Jalisco]}\n"
            + "{[Store].[Mexico].[Veracruz]}\n"
            + "{[Store].[Mexico].[Yucatan]}\n"
            + "{[Store].[Mexico].[Zacatecas]}\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "Axis #2:\n"
            + "{[Time].[1st Half Sales]}\n"
            + "{[Time].[2nd Half Sales]}\n"
            + "{[Time].[Difference]}\n"
            // non-US stores have no fact data, hence the empty cells
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: 74,571.95\n"
            + "Row #0: 71,943.17\n"
            + "Row #0: 125,779.50\n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: 84,595.89\n"
            + "Row #1: 70,333.90\n"
            + "Row #1: 138,013.72\n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: 10,023.94\n"
            + "Row #2: -1,609.27\n"
            + "Row #2: 12,234.22\n");
    }
    /**
     * Tests Aggregate inside IIf; the true branch (aggregate of [OR]) is
     * chosen since 3 &gt; 1.
     */
    public void testAggregateWithIIF() {
        assertQueryReturns(
            "with member store.foo as 'iif(3>1,"
            + "aggregate({[Store].[All Stores].[USA].[OR]}),"
            + "aggregate({[Store].[All Stores].[USA].[CA]}))' "
            + "select {store.foo} on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[foo]}\n"
            + "Row #0: 67,659\n");
    }
    /**
     * Same as {@code testAggregate2} but using AllMembers instead of
     * Members on the columns axis; the result is identical.
     */
    public void testAggregate2AllMembers() {
        assertQueryReturns(
            "WITH\n"
            + "  Member [Time].[Time].[1st Half Sales] AS 'Aggregate({Time.[1997].[Q1], Time.[1997].[Q2]})'\n"
            + "  Member [Time].[Time].[2nd Half Sales] AS 'Aggregate({Time.[1997].[Q3], Time.[1997].[Q4]})'\n"
            + "  Member [Time].[Time].[Difference] AS 'Time.[2nd Half Sales] - Time.[1st Half Sales]'\n"
            + "SELECT\n"
            + "   { [Store].[Store State].AllMembers} ON COLUMNS,\n"
            + "   { Time.[1st Half Sales], Time.[2nd Half Sales], Time.[Difference]} ON ROWS\n"
            + "FROM Sales\n"
            + "WHERE [Measures].[Store Sales]",
            "Axis #0:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #1:\n"
            + "{[Store].[Canada].[BC]}\n"
            + "{[Store].[Mexico].[DF]}\n"
            + "{[Store].[Mexico].[Guerrero]}\n"
            + "{[Store].[Mexico].[Jalisco]}\n"
            + "{[Store].[Mexico].[Veracruz]}\n"
            + "{[Store].[Mexico].[Yucatan]}\n"
            + "{[Store].[Mexico].[Zacatecas]}\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "Axis #2:\n"
            + "{[Time].[1st Half Sales]}\n"
            + "{[Time].[2nd Half Sales]}\n"
            + "{[Time].[Difference]}\n"
            // non-US stores have no fact data, hence the empty cells
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: 74,571.95\n"
            + "Row #0: 71,943.17\n"
            + "Row #0: 125,779.50\n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: \n"
            + "Row #1: 84,595.89\n"
            + "Row #1: 70,333.90\n"
            + "Row #1: 138,013.72\n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: \n"
            + "Row #2: 10,023.94\n"
            + "Row #2: -1,609.27\n"
            + "Row #2: 12,234.22\n");
    }
    /**
     * Tests using Aggregate-based calculated members in the WHERE clause to
     * simulate a compound (multi-member) slicer.
     */
    public void testAggregateToSimulateCompoundSlicer() {
        assertQueryReturns(
            "WITH MEMBER [Time].[Time].[1997 H1] as 'Aggregate({[Time].[1997].[Q1], [Time].[1997].[Q2]})'\n"
            + "  MEMBER [Education Level].[College or higher] as 'Aggregate({[Education Level].[Bachelors Degree], [Education Level].[Graduate Degree]})'\n"
            + "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} on columns,\n"
            + "  {[Product].children} on rows\n"
            + "FROM [Sales]\n"
            + "WHERE ([Time].[1997 H1], [Education Level].[College or higher], [Gender].[F])",
            "Axis #0:\n"
            + "{[Time].[1997 H1], [Education Level].[College or higher], [Gender].[F]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Non-Consumable]}\n"
            + "Row #0: 1,797\n"
            + "Row #0: 3,620.49\n"
            + "Row #1: 15,002\n"
            + "Row #1: 31,931.88\n"
            + "Row #2: 3,845\n"
            + "Row #2: 8,173.22\n");
    }
    /**
     * Tests behavior where CurrentMember occurs in calculated members and
     * that member is a set.
     *
     * <p>Mosha discusses this behavior in the article
     * <a href="http://www.mosha.com/msolap/articles/mdxmultiselectcalcs.htm">
     * Multiselect friendly MDX calculations</a>.
     *
     * <p>Mondrian's behavior is consistent with MSAS 2K: it returns zeroes.
     * SSAS 2005 returns an error, which can be fixed by reformulating the
     * calculated members.
     *
     * @see mondrian.rolap.FastBatchingCellReaderTest#testAggregateDistinctCount()
     */
    public void testMultiselectCalculations() {
        assertQueryReturns(
            "WITH\n"
            + "MEMBER [Measures].[Declining Stores Count] AS\n"
            + " ' Count(Filter(Descendants(Store.CurrentMember, Store.[Store Name]), [Store Sales] < ([Store Sales],Time.Time.PrevMember))) '\n"
            + " MEMBER \n"
            + "  [Store].[XL_QZX] AS 'Aggregate ({ [Store].[All Stores].[USA].[WA] , [Store].[All Stores].[USA].[CA] })' \n"
            + "SELECT \n"
            + "  NON EMPTY HIERARCHIZE(AddCalculatedMembers({DrillDownLevel({[Product].[All Products]})})) \n"
            + "    DIMENSION PROPERTIES PARENT_UNIQUE_NAME ON COLUMNS \n"
            + "FROM [Sales] \n"
            + "WHERE ([Measures].[Declining Stores Count], [Time].[1998].[Q3], [Store].[XL_QZX])",
            "Axis #0:\n"
            + "{[Measures].[Declining Stores Count], [Time].[1998].[Q3], [Store].[XL_QZX]}\n"
            + "Axis #1:\n"
            + "{[Product].[All Products]}\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Non-Consumable]}\n"
            // zeroes, matching MSAS 2K behavior (see javadoc above)
            + "Row #0: .00\n"
            + "Row #0: .00\n"
            + "Row #0: .00\n"
            + "Row #0: .00\n");
    }
public void testAvg() {
assertExprReturns(
"AVG({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"188,412.71");
}
//todo: testAvgWithNulls
public void testCorrelation() {
assertExprReturns(
"Correlation({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales]) * 1000000",
"999,906");
}
    /**
     * Tests the Count function: dependency analysis for INCLUDEEMPTY vs
     * EXCLUDEEMPTY, plus basic counts including an empty set.
     */
    public void testCount() {
        // INCLUDEEMPTY never evaluates cells, so it depends only on [Gender]
        getTestContext().assertExprDependsOn(
            "count(Crossjoin([Store].[All Stores].[USA].Children, {[Gender].children}), INCLUDEEMPTY)",
            "{[Gender]}");
        // EXCLUDEEMPTY evaluates cells, so it depends on everything except
        // the hierarchy being iterated ([Store])
        String s1 = TestContext.allHiersExcept("[Store]");
        getTestContext().assertExprDependsOn(
            "count(Crossjoin([Store].[All Stores].[USA].Children, "
            + "{[Gender].children}), EXCLUDEEMPTY)",
            s1);
        assertExprReturns(
            "count({[Promotion Media].[Media Type].members})", "14");
        // applied to an empty set
        assertExprReturns("count({[Gender].Parent}, IncludeEmpty)", "0");
    }
    /**
     * Tests Count with EXCLUDEEMPTY: only media types with non-empty cells
     * are counted, and an empty set counts as zero.
     */
    public void testCountExcludeEmpty() {
        String s1 = TestContext.allHiersExcept("[Store]");
        getTestContext().assertExprDependsOn(
            "count(Crossjoin([Store].[USA].Children, {[Gender].children}), EXCLUDEEMPTY)",
            s1);
        assertQueryReturns(
            "with member [Measures].[Promo Count] as \n"
            + " ' Count(Crossjoin({[Measures].[Unit Sales]},\n"
            + " {[Promotion Media].[Media Type].members}), EXCLUDEEMPTY)'\n"
            + "select {[Measures].[Unit Sales], [Measures].[Promo Count]} on columns,\n"
            + " {[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].children} on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Promo Count]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Excellent]}\n"
            + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Fabulous]}\n"
            + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Skinner]}\n"
            + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Token]}\n"
            + "{[Product].[Drink].[Beverages].[Carbonated Beverages].[Soda].[Washington]}\n"
            + "Row #0: 738\n"
            + "Row #0: 14\n"
            + "Row #1: 632\n"
            + "Row #1: 13\n"
            + "Row #2: 655\n"
            + "Row #2: 14\n"
            + "Row #3: 735\n"
            + "Row #3: 14\n"
            + "Row #4: 647\n"
            + "Row #4: 12\n");
        // applied to an empty set
        assertExprReturns("count({[Gender].Parent}, ExcludeEmpty)", "0");
    }
    /**
     * Tests that the 'null' value is regarded as empty, even if the underlying
     * cell has fact table rows.
     *
     * <p>[Foo] is null except in Q2; [CountExc] therefore counts 1 quarter
     * when [Foo] is the current measure, while [CountInc] always counts 4.
     *
     * <p>For a fuller test case, see
     * {@link mondrian.xmla.XmlaCognosTest#testCognosMDXSuiteConvertedAdventureWorksToFoodMart_015()}
     */
    public void testCountExcludeEmptyNull() {
        assertQueryReturns(
            "WITH MEMBER [Measures].[Foo] AS\n"
            + " Iif("
            + TestContext.hierarchyName("Time", "Time")
            + ".CurrentMember.Name = 'Q2', 1, NULL)\n"
            + " MEMBER [Measures].[Bar] AS\n"
            + " Iif("
            + TestContext.hierarchyName("Time", "Time")
            + ".CurrentMember.Name = 'Q2', 1, 0)\n"
            + " Member [Time].[Time].[CountExc] AS\n"
            + " Count([Time].[1997].Children, EXCLUDEEMPTY),\n"
            + " SOLVE_ORDER = 2\n"
            + " Member [Time].[Time].[CountInc] AS\n"
            + " Count([Time].[1997].Children, INCLUDEEMPTY),\n"
            + " SOLVE_ORDER = 2\n"
            + "SELECT {[Measures].[Foo],\n"
            + " [Measures].[Bar],\n"
            + " [Measures].[Unit Sales]} ON 0,\n"
            + " {[Time].[1997].Children,\n"
            + " [Time].[CountExc],\n"
            + " [Time].[CountInc]} ON 1\n"
            + "FROM [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Foo]}\n"
            + "{[Measures].[Bar]}\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "{[Time].[CountExc]}\n"
            + "{[Time].[CountInc]}\n"
            + "Row #0: \n"
            + "Row #0: 0\n"
            + "Row #0: 66,291\n"
            + "Row #1: 1\n"
            + "Row #1: 1\n"
            + "Row #1: 62,610\n"
            + "Row #2: \n"
            + "Row #2: 0\n"
            + "Row #2: 65,848\n"
            + "Row #3: \n"
            + "Row #3: 0\n"
            + "Row #3: 72,024\n"
            // EXCLUDEEMPTY counts 1 for [Foo] (only Q2 non-null) but 4 for
            // [Bar] and [Unit Sales]; INCLUDEEMPTY is always 4
            + "Row #4: 1\n"
            + "Row #4: 4\n"
            + "Row #4: 4\n"
            + "Row #5: 4\n"
            + "Row #5: 4\n"
            + "Row #5: 4\n");
    }
    /**
     * Testcase for
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-710">
     * bug MONDRIAN-710, "Count with ExcludeEmpty throws an exception when the
     * cube does not have a factCountMeasure"</a>.
     */
    public void testCountExcludeEmptyOnCubeWithNoCountFacts() {
        assertQueryReturns(
            "WITH "
            + "  MEMBER [Measures].[count] AS '"
            + "    COUNT([Store Type].[Store Type].MEMBERS, EXCLUDEEMPTY)'"
            + " SELECT "
            + "  {[Measures].[count]} ON AXIS(0)"
            + " FROM [Warehouse]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[count]}\n"
            + "Row #0: 5\n");
    }
    /**
     * As {@code testCountExcludeEmptyOnCubeWithNoCountFacts}, but against a
     * virtual cube (see bug MONDRIAN-710).
     */
    public void testCountExcludeEmptyOnVirtualCubeWithNoCountFacts() {
        assertQueryReturns(
            "WITH "
            + "  MEMBER [Measures].[count] AS '"
            + "    COUNT([Store].MEMBERS, EXCLUDEEMPTY)'"
            + " SELECT "
            + "  {[Measures].[count]} ON AXIS(0)"
            + " FROM [Warehouse and Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[count]}\n"
            + "Row #0: 31\n");
    }
//todo: testCountNull, testCountNoExp
public void testCovariance() {
assertExprReturns(
"Covariance({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales])",
"1,355,761,899");
}
public void testCovarianceN() {
assertExprReturns(
"CovarianceN({[Store].[All Stores].[USA].children}, [Measures].[Unit Sales], [Measures].[Store Sales])",
"2,033,642,849");
}
    /**
     * Tests IIf with numeric branches, including operand type inference
     * when comparing two measures.
     */
    public void testIIfNumeric() {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, 45, 32)",
            "45");
        // Compare two members. The system needs to figure out that they are
        // both numeric, and use the right overloaded version of ">", otherwise
        // we'll get a ClassCastException at runtime.
        assertExprReturns(
            "IIf([Measures].[Unit Sales] > [Measures].[Store Sales], 45, 32)",
            "32");
    }
public void testMax() {
assertExprReturns(
"MAX({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"263,793.22");
}
    /**
     * Regression test for bug 1771928, "Max() works incorrectly with
     * negative values": Min and Max of a singleton negative set must both
     * return that value.
     */
    public void testMaxNegative() {
        // Bug 1771928, "Max() works incorrectly with negative values"
        assertQueryReturns(
            "with \n"
            + "  member [Customers].[Neg] as '-1'\n"
            + "  member [Customers].[Min] as 'Min({[Customers].[Neg]})'\n"
            + "  member [Customers].[Max] as 'Max({[Customers].[Neg]})'\n"
            + "select {[Customers].[Neg],[Customers].[Min],[Customers].[Max]} on 0\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[Neg]}\n"
            + "{[Customers].[Min]}\n"
            + "{[Customers].[Max]}\n"
            + "Row #0: -1\n"
            + "Row #0: -1\n"
            + "Row #0: -1\n");
    }
public void testMedian() {
assertExprReturns(
"MEDIAN({[Store].[All Stores].[USA].children},"
+ "[Measures].[Store Sales])",
"159,167.84");
}
    /**
     * Tests Median over calculated time members, comparing half-year sums
     * and the median of all [Time] members per store.
     */
    public void testMedian2() {
        assertQueryReturns(
            "WITH\n"
            + " Member [Time].[Time].[1st Half Sales] AS 'Sum({[Time].[1997].[Q1], [Time].[1997].[Q2]})'\n"
            + " Member [Time].[Time].[2nd Half Sales] AS 'Sum({[Time].[1997].[Q3], [Time].[1997].[Q4]})'\n"
            + " Member [Time].[Time].[Median] AS 'Median(Time.[Time].Members)'\n"
            + "SELECT\n"
            + " NON EMPTY { [Store].[Store Name].Members} ON COLUMNS,\n"
            + " { [Time].[1st Half Sales], [Time].[2nd Half Sales], [Time].[Median]} ON ROWS\n"
            + "FROM Sales\n"
            + "WHERE [Measures].[Store Sales]",
            "Axis #0:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[CA].[Beverly Hills].[Store 6]}\n"
            + "{[Store].[USA].[CA].[Los Angeles].[Store 7]}\n"
            + "{[Store].[USA].[CA].[San Diego].[Store 24]}\n"
            + "{[Store].[USA].[CA].[San Francisco].[Store 14]}\n"
            + "{[Store].[USA].[OR].[Portland].[Store 11]}\n"
            + "{[Store].[USA].[OR].[Salem].[Store 13]}\n"
            + "{[Store].[USA].[WA].[Bellingham].[Store 2]}\n"
            + "{[Store].[USA].[WA].[Bremerton].[Store 3]}\n"
            + "{[Store].[USA].[WA].[Seattle].[Store 15]}\n"
            + "{[Store].[USA].[WA].[Spokane].[Store 16]}\n"
            + "{[Store].[USA].[WA].[Tacoma].[Store 17]}\n"
            + "{[Store].[USA].[WA].[Walla Walla].[Store 22]}\n"
            + "{[Store].[USA].[WA].[Yakima].[Store 23]}\n"
            + "Axis #2:\n"
            + "{[Time].[1st Half Sales]}\n"
            + "{[Time].[2nd Half Sales]}\n"
            + "{[Time].[Median]}\n"
            + "Row #0: 20,801.04\n"
            + "Row #0: 25,421.41\n"
            + "Row #0: 26,275.11\n"
            + "Row #0: 2,074.39\n"
            + "Row #0: 28,519.18\n"
            + "Row #0: 43,423.99\n"
            + "Row #0: 2,140.99\n"
            + "Row #0: 25,502.08\n"
            + "Row #0: 25,293.50\n"
            + "Row #0: 23,265.53\n"
            + "Row #0: 34,926.91\n"
            + "Row #0: 2,159.60\n"
            + "Row #0: 12,490.89\n"
            + "Row #1: 24,949.20\n"
            + "Row #1: 29,123.87\n"
            + "Row #1: 28,156.03\n"
            + "Row #1: 2,366.79\n"
            + "Row #1: 26,539.61\n"
            + "Row #1: 43,794.29\n"
            + "Row #1: 2,598.24\n"
            + "Row #1: 27,394.22\n"
            + "Row #1: 27,350.57\n"
            + "Row #1: 26,368.93\n"
            + "Row #1: 39,917.05\n"
            + "Row #1: 2,546.37\n"
            + "Row #1: 11,838.34\n"
            + "Row #2: 4,577.35\n"
            + "Row #2: 5,211.38\n"
            + "Row #2: 4,722.87\n"
            + "Row #2: 398.24\n"
            + "Row #2: 5,039.50\n"
            + "Row #2: 7,374.59\n"
            + "Row #2: 410.22\n"
            + "Row #2: 4,924.04\n"
            + "Row #2: 4,569.13\n"
            + "Row #2: 4,511.68\n"
            + "Row #2: 6,630.91\n"
            + "Row #2: 419.51\n"
            + "Row #2: 2,169.48\n");
    }
    /**
     * Tests the Percentile function at the boundary percentiles (0, 50, 100)
     * and at interpolated points, including singleton sets (MONDRIAN-1045).
     */
    public void testPercentile() {
        // same result as median
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 50)",
            "159,167.84");
        // same result as min
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 0)",
            "142,277.07");
        // same result as max
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].children}, [Measures].[Store Sales], 100)",
            "263,793.22");
        // check some real percentile cases
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 50)",
            "49,634.46");
        // lets return the second element of the 7 children 4,739.23
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 100/7*2)",
            "4,739.23");
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA].[WA].children}, [Measures].[Store Sales], 95)",
            "67,162.28");
        // check MONDRIAN-1045
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 50)",
            "282,619.07");
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 40)",
            "226,095.25");
        assertExprReturns(
            "Percentile({[Store].[All Stores].[USA]}, [Measures].[Store Sales], 95)",
            "536,976.22");
    }
public void testMin() {
assertExprReturns(
"MIN({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"142,277.07");
}
public void testMinTuple() {
assertExprReturns(
"Min([Customers].[All Customers].[USA].Children, ([Measures].[Unit Sales], [Gender].[All Gender].[F]))",
"33,036");
}
public void testStdev() {
assertExprReturns(
"STDEV({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"65,825.45");
}
public void testStdevP() {
assertExprReturns(
"STDEVP({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"53,746.26");
}
public void testSumNoExp() {
assertExprReturns(
"SUM({[Promotion Media].[Media Type].members})", "266,773");
}
    /**
     * Tests the &lt;Member&gt;.VALUE function and which built-in member
     * properties are (and are not) callable in function syntax.
     */
    public void testValue() {
        // VALUE is usually a cell property, not a member property.
        // We allow it because MS documents it as a function, <Member>.VALUE.
        assertExprReturns("[Measures].[Store Sales].VALUE", "565,238.13");

        // Depends upon almost everything.
        String s1 = TestContext.allHiersExcept("[Measures]");
        getTestContext().assertExprDependsOn(
            "[Measures].[Store Sales].VALUE", s1);

        // We do not allow FORMATTED_VALUE.
        assertExprThrows(
            "[Measures].[Store Sales].FORMATTED_VALUE",
            "MDX object '[Measures].[Store Sales].FORMATTED_VALUE' not found in cube 'Sales'");

        assertExprReturns("[Measures].[Store Sales].NAME", "Store Sales");
        // MS says that ID and KEY are standard member properties for
        // OLE DB for OLAP, but not for XML/A. We don't support them.
        assertExprThrows(
            "[Measures].[Store Sales].ID",
            "MDX object '[Measures].[Store Sales].ID' not found in cube 'Sales'");

        // Error for KEY is slightly different than for ID. It doesn't matter
        // very much.
        //
        // The error is different because KEY is registered as a Mondrian
        // builtin property, but ID isn't. KEY cannot be evaluated in
        // "<MEMBER>.KEY" syntax because there is not function defined. For
        // other builtin properties, such as NAME, CAPTION there is a builtin
        // function.
        assertExprThrows(
            "[Measures].[Store Sales].KEY",
            "No function matches signature '<Member>.KEY'");

        assertExprReturns("[Measures].[Store Sales].CAPTION", "Store Sales");
    }
public void testVar() {
assertExprReturns(
"VAR({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"4,332,990,493.69");
}
public void testVarP() {
assertExprReturns(
"VARP({[Store].[All Stores].[USA].children},[Measures].[Store Sales])",
"2,888,660,329.13");
}
    /**
     * Tests the AS operator, that gives an expression an alias.
     *
     * <p>Covers alias references via Current/CurrentOrdinal, precedence with
     * the ':' range operator, aliasing of members and tuples (which are
     * implicitly converted to sets), and error cases.
     */
    public void testAs() {
        assertAxisReturns(
            "Filter([Customers].Children as t,\n"
            + "t.Current.Name = 'USA')",
            "[Customers].[USA]");

        // 'AS' and the ':' operator have similar precedence, so it's worth
        // checking that they play nice.
        assertQueryReturns(
            "select\n"
            + " filter(\n"
            + " [Time].[1997].[Q1].[2] : [Time].[1997].[Q3].[9] as t,"
            + " mod(t.CurrentOrdinal, 2) = 0) on 0\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "Row #0: 20,957\n"
            + "Row #0: 20,179\n"
            + "Row #0: 21,350\n"
            + "Row #0: 21,697\n");

        // AS member fails on SSAS with "The CHILDREN function expects a member
        // expression for the 0 argument. A tuple set expression was used."
        assertQueryThrows(
            "select\n"
            + " {([Time].[1997].[Q1] as t).Children, \n"
            + " t.Parent } on 0 \n"
            + "from [Sales]",
            "No function matches signature '<Set>.Children'");

        // Set of members. OK.
        assertQueryReturns(
            "select Measures.[Unit Sales] on 0, \n"
            + " {[Time].[1997].Children as t, \n"
            + " Descendants(t, [Time].[Month])} on 1 \n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2].[5]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "{[Time].[1997].[Q3].[9]}\n"
            + "{[Time].[1997].[Q4].[10]}\n"
            + "{[Time].[1997].[Q4].[11]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "Row #0: 66,291\n"
            + "Row #1: 62,610\n"
            + "Row #2: 65,848\n"
            + "Row #3: 72,024\n"
            + "Row #4: 21,628\n"
            + "Row #5: 20,957\n"
            + "Row #6: 23,706\n"
            + "Row #7: 20,179\n"
            + "Row #8: 21,081\n"
            + "Row #9: 21,350\n"
            + "Row #10: 23,763\n"
            + "Row #11: 21,697\n"
            + "Row #12: 20,388\n"
            + "Row #13: 19,958\n"
            + "Row #14: 25,270\n"
            + "Row #15: 26,796\n");

        // Alias a member. Implicitly becomes set. OK.
        assertQueryReturns(
            "select Measures.[Unit Sales] on 0,\n"
            + " {[Time].[1997] as t,\n"
            + " Descendants(t, [Time].[Month])} on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q1].[2]}\n"
            + "{[Time].[1997].[Q1].[3]}\n"
            + "{[Time].[1997].[Q2].[4]}\n"
            + "{[Time].[1997].[Q2].[5]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "{[Time].[1997].[Q3].[9]}\n"
            + "{[Time].[1997].[Q4].[10]}\n"
            + "{[Time].[1997].[Q4].[11]}\n"
            + "{[Time].[1997].[Q4].[12]}\n"
            + "Row #0: 266,773\n"
            + "Row #1: 21,628\n"
            + "Row #2: 20,957\n"
            + "Row #3: 23,706\n"
            + "Row #4: 20,179\n"
            + "Row #5: 21,081\n"
            + "Row #6: 21,350\n"
            + "Row #7: 23,763\n"
            + "Row #8: 21,697\n"
            + "Row #9: 20,388\n"
            + "Row #10: 19,958\n"
            + "Row #11: 25,270\n"
            + "Row #12: 26,796\n");

        // Alias a tuple. Implicitly becomes set. The error confirms that the
        // named set's type is a set of tuples. SSAS gives error "Descendants
        // function expects a member or set ..."
        assertQueryThrows(
            "select Measures.[Unit Sales] on 0,\n"
            + " {([Time].[1997], [Customers].[USA].[CA]) as t,\n"
            + " Descendants(t, [Time].[Month])} on 1\n"
            + "from [Sales]",
            "Argument to Descendants function must be a member or set of members, not a set of tuples");
    }
    /**
     * Further tests of the AS alias operator: shadowing between named sets
     * and aliases, alias scope across axes, CurrentMember-style references
     * through Current/CurrentOrdinal, and invalid alias forms.
     */
    public void testAs2() {
        // Named set and alias with same name (t) and a second alias (t2).
        // Reference to t from within descendants resolves to alias, of type
        // [Time], because it is nearer.
        final String result =
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales], [Gender].[F]}\n"
            + "{[Measures].[Unit Sales], [Gender].[M]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "{[Time].[1997].[Q1].[1]}\n"
            + "{[Time].[1997].[Q2].[6]}\n"
            + "{[Time].[1997].[Q4].[11]}\n"
            + "Row #0: 32,910\n"
            + "Row #0: 33,381\n"
            + "Row #1: 30,992\n"
            + "Row #1: 31,618\n"
            + "Row #2: 32,599\n"
            + "Row #2: 33,249\n"
            + "Row #3: 35,057\n"
            + "Row #3: 36,967\n"
            + "Row #4: 10,932\n"
            + "Row #4: 10,696\n"
            + "Row #5: 10,466\n"
            + "Row #5: 10,884\n"
            + "Row #6: 12,320\n"
            + "Row #6: 12,950\n";
        assertQueryReturns(
            "with set t as [Gender].Children\n"
            + "select\n"
            + " Measures.[Unit Sales] * t on 0,\n"
            + " {\n"
            + " [Time].[1997].Children as t,\n"
            + " Filter(\n"
            + " Descendants(t, [Time].[Month]) as t2,\n"
            + " Mod(t2.CurrentOrdinal, 5) = 0)\n"
            + " } on 1\n"
            + "from [Sales]",
            result);

        // Two aliases with same name. OK.
        assertQueryReturns(
            "select\n"
            + " Measures.[Unit Sales] * [Gender].Children as t on 0,\n"
            + " {[Time].[1997].Children as t,\n"
            + " Filter(\n"
            + " Descendants(t, [Time].[Month]) as t2,\n"
            + " Mod(t2.CurrentOrdinal, 5) = 0)\n"
            + " } on 1\n"
            + "from [Sales]",
            result);

        // Bug MONDRIAN-648 causes 'AS' to have lower precedence than '*'.
        if (Bug.BugMondrian648Fixed) {
            // Note that 'as' has higher precedence than '*'.
            assertQueryReturns(
                "select\n"
                + " Measures.[Unit Sales] * [Gender].Members as t on 0,\n"
                + " {t} on 1\n"
                + "from [Sales]",
                "xxxxx");
        }

        // Reference to hierarchy on other axis.
        // On SSAS 2005, finds t, and gives error,
        // "The Gender hierarchy already appears in the Axis0 axis."
        // On Mondrian, cannot find t. FIXME.
        assertQueryThrows(
            "select\n"
            + " Measures.[Unit Sales] * ([Gender].Members as t) on 0,\n"
            + " {t} on 1\n"
            + "from [Sales]",
            "MDX object 't' not found in cube 'Sales'");

        // As above, with parentheses. Tuple valued.
        // On SSAS 2005, finds t, and gives error,
        // "The Measures hierarchy already appears in the Axis0 axis."
        // On Mondrian, cannot find t. FIXME.
        assertQueryThrows(
            "select\n"
            + " (Measures.[Unit Sales] * [Gender].Members) as t on 0,\n"
            + " {t} on 1\n"
            + "from [Sales]",
            "MDX object 't' not found in cube 'Sales'");

        // Calculated set, CurrentMember
        assertQueryReturns(
            "select Measures.[Unit Sales] on 0,\n"
            + " filter(\n"
            + " (Time.Month.Members * Gender.Members) as s,\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S], [Gender].[F]) > 17000) on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
            + "Row #0: 19,958\n"
            + "Row #1: 9,506\n"
            + "Row #2: 10,452\n"
            + "Row #3: 25,270\n"
            + "Row #4: 12,320\n"
            + "Row #5: 12,950\n"
            + "Row #6: 26,796\n"
            + "Row #7: 13,231\n"
            + "Row #8: 13,565\n");

        // As above, but don't override [Gender] in filter condition. Note that
        // the filter condition is evaluated in the context created by the
        // filter set. So, only items with [All Gender] pass the filter.
        assertQueryReturns(
            "select Measures.[Unit Sales] on 0,\n"
            + " filter(\n"
            + " (Time.Month.Members * Gender.Members) as s,\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 35000) on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
            + "Row #0: 19,958\n"
            + "Row #1: 25,270\n"
            + "Row #2: 26,796\n");

        // Multiple definitions of alias within same axis
        assertQueryReturns(
            "select Measures.[Unit Sales] on 0,\n"
            + " generate(\n"
            + " [Marital Status].Children as s,\n"
            + " filter(\n"
            + " (Time.Month.Members * Gender.Members) as s,\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S], [Gender].[F]) > 17000),\n"
            + " ALL) on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[10], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[11], [Gender].[M]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[All Gender]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[F]}\n"
            + "{[Time].[1997].[Q4].[12], [Gender].[M]}\n"
            + "Row #0: 19,958\n"
            + "Row #1: 9,506\n"
            + "Row #2: 10,452\n"
            + "Row #3: 25,270\n"
            + "Row #4: 12,320\n"
            + "Row #5: 12,950\n"
            + "Row #6: 26,796\n"
            + "Row #7: 13,231\n"
            + "Row #8: 13,565\n"
            + "Row #9: 19,958\n"
            + "Row #10: 9,506\n"
            + "Row #11: 10,452\n"
            + "Row #12: 25,270\n"
            + "Row #13: 12,320\n"
            + "Row #14: 12,950\n"
            + "Row #15: 26,796\n"
            + "Row #16: 13,231\n"
            + "Row #17: 13,565\n");

        // Multiple definitions of alias within same axis.
        //
        // On SSAS 2005, gives error, "The CURRENT function cannot be called in
        // current context because the 'x' set is not in scope". SSAS 2005 gives
        // same error even if set does not exist.
        assertQueryThrows(
            "with member Measures.Foo as 'x.Current.Name'\n"
            + "select\n"
            + " {Measures.[Unit Sales], Measures.Foo} on 0,\n"
            + " generate(\n"
            + " [Marital Status].\n"
            + " Children as x,\n"
            + " filter(\n"
            + " Gender.Members as x,\n"
            + " (x.Current, [Marital Status].[S]) > 50000),\n"
            + " ALL) on 1\n"
            + "from [Sales]",
            "MDX object 'x' not found in cube 'Sales'");

        // As above, but set is not out of scope; it does not exist; but error
        // should be the same.
        assertQueryThrows(
            "with member Measures.Foo as 'z.Current.Name'\n"
            + "select\n"
            + " {Measures.[Unit Sales], Measures.Foo} on 0,\n"
            + " generate(\n"
            + " [Marital Status].\n"
            + " Children as s,\n"
            + " filter(\n"
            + " Gender.Members as s,\n"
            + " (s.Current, [Marital Status].[S]) > 50000),\n"
            + " ALL) on 1\n"
            + "from [Sales]",
            "MDX object 'z' not found in cube 'Sales'");

        // 'set AS string' is invalid
        assertQueryThrows(
            "select Measures.[Unit Sales] on 0,\n"
            + " filter(\n"
            + " (Time.Month.Members * Gender.Members) as 'foo',\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
            + "from [Sales]",
            "Syntax error at line 3, column 46, token ''foo''");

        // 'set AS numeric' is invalid
        assertQueryThrows(
            "select Measures.[Unit Sales] on 0,\n"
            + " filter(\n"
            + " (Time.Month.Members * Gender.Members) as 1234,\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
            + "from [Sales]",
            "Syntax error at line 3, column 46, token '1234'");

        // 'numeric AS identifier' is invalid
        assertQueryThrows(
            "select Measures.[Unit Sales] on 0,\n"
            + " filter(\n"
            + " 123 * 456 as s,\n"
            + " (s.Current.Item(0).Parent, [Marital Status].[S]) > 50000) on 1\n"
            + "from [Sales]",
            "No function matches signature '<Numeric Expression> AS <Set>'");
    }
    /**
     * Tests Ascendants: the member itself plus each ancestor up to the
     * all-member, ordered from the member upward.
     */
    public void testAscendants() {
        assertAxisReturns(
            "Ascendants([Store].[USA].[CA])",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA]\n"
            + "[Store].[All Stores]");
    }
public void testAscendantsAll() {
assertAxisReturns(
"Ascendants([Store].DefaultMember)", "[Store].[All Stores]");
}
public void testAscendantsNull() {
assertAxisReturns(
"Ascendants([Gender].[F].PrevMember)", "");
}
    /** Tests BottomCount: the two media types with the lowest unit sales. */
    public void testBottomCount() {
        assertAxisReturns(
            "BottomCount({[Promotion Media].[Media Type].members}, 2, [Measures].[Unit Sales])",
            "[Promotion Media].[Radio]\n"
            + "[Promotion Media].[Sunday Paper, Radio, TV]");
    }

    //todo: test unordered
    /**
     * Tests BottomPercent at 100% (returns all members, sorted ascending by
     * the measure) and at a small percentage.
     */
    public void testBottomPercent() {
        assertAxisReturns(
            "BottomPercent(Filter({[Store].[All Stores].[USA].[CA].Children, [Store].[All Stores].[USA].[OR].Children, [Store].[All Stores].[USA].[WA].Children}, ([Measures].[Unit Sales] > 0.0)), 100.0, [Measures].[Store Sales])",
            "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[WA].[Walla Walla]\n"
            + "[Store].[USA].[WA].[Bellingham]\n"
            + "[Store].[USA].[WA].[Yakima]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[WA].[Spokane]\n"
            + "[Store].[USA].[WA].[Seattle]\n"
            + "[Store].[USA].[WA].[Bremerton]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[OR].[Portland]\n"
            + "[Store].[USA].[WA].[Tacoma]\n"
            + "[Store].[USA].[OR].[Salem]");
        assertAxisReturns(
            "BottomPercent({[Promotion Media].[Media Type].members}, 1, [Measures].[Unit Sales])",
            "[Promotion Media].[Radio]\n"
            + "[Promotion Media].[Sunday Paper, Radio, TV]");
    }

    //todo: test precision
    /**
     * Tests BottomSum: the smallest members whose unit sales sum to at most
     * the given threshold.
     */
    public void testBottomSum() {
        assertAxisReturns(
            "BottomSum({[Promotion Media].[Media Type].members}, 5000, [Measures].[Unit Sales])",
            "[Promotion Media].[Radio]\n"
            + "[Promotion Media].[Sunday Paper, Radio, TV]");
    }
    /** Tests Except when either the left or the right argument is empty. */
    public void testExceptEmpty() {
        // If left is empty, result is empty.
        assertAxisReturns(
            "Except(Filter([Gender].Members, 1=0), {[Gender].[M]})", "");

        // If right is empty, result is left.
        assertAxisReturns(
            "Except({[Gender].[M]}, Filter([Gender].Members, 1=0))",
            "[Gender].[M]");
    }
    /**
     * Tests that Except() successfully removes crossjoined tuples
     * from the axis results. Previously, this would fail by returning
     * all tuples in the first argument to Except. bug 1439627
     */
    public void testExceptCrossjoin() {
        assertAxisReturns(
            "Except(CROSSJOIN({[Promotion Media].[All Media]},\n"
            + " [Product].[All Products].Children),\n"
            + " CROSSJOIN({[Promotion Media].[All Media]},\n"
            + " {[Product].[All Products].[Drink]}))",
            "{[Promotion Media].[All Media], [Product].[Food]}\n"
            + "{[Promotion Media].[All Media], [Product].[Non-Consumable]}");
    }
    /**
     * Tests the Extract function: projecting one or more hierarchies out of
     * a set of tuples, including error cases (no hierarchy argument,
     * non-constant hierarchy, hierarchy absent from the set, duplicates).
     */
    public void testExtract() {
        assertAxisReturns(
            "Extract(\n"
            + "Crossjoin({[Gender].[F], [Gender].[M]},\n"
            + " {[Marital Status].Members}),\n"
            + "[Gender])",
            "[Gender].[F]\n" + "[Gender].[M]");

        // Extract(<set>) with no dimensions is not valid
        assertAxisThrows(
            "Extract(Crossjoin({[Gender].[F], [Gender].[M]}, {[Marital Status].Members}))",
            "No function matches signature 'Extract(<Set>)'");

        // Extract applied to non-constant dimension should fail
        assertAxisThrows(
            "Extract(Crossjoin([Gender].Members, [Store].Children), [Store].Hierarchy.Dimension)",
            "not a constant hierarchy: [Store].Hierarchy.Dimension");

        // Extract applied to non-constant hierarchy should fail
        assertAxisThrows(
            "Extract(Crossjoin([Gender].Members, [Store].Children), [Store].Hierarchy)",
            "not a constant hierarchy: [Store].Hierarchy");

        // Extract applied to set of members is OK (if silly). Duplicates are
        // removed, as always.
        assertAxisReturns(
            "Extract({[Gender].[M], [Gender].Members}, [Gender])",
            "[Gender].[M]\n"
            + "[Gender].[All Gender]\n"
            + "[Gender].[F]");

        // Extract of hierarchy not in set fails
        assertAxisThrows(
            "Extract(Crossjoin([Gender].Members, [Store].Children), [Marital Status])",
            "hierarchy [Marital Status] is not a hierarchy of the expression Crossjoin([Gender].Members, [Store].Children)");

        // Extract applied to empty set returns empty set
        assertAxisReturns(
            "Extract(Crossjoin({[Gender].Parent}, [Store].Children), [Store])",
            "");

        // Extract applied to asymmetric set
        assertAxisReturns(
            "Extract(\n"
            + "{([Gender].[M], [Marital Status].[M]),\n"
            + " ([Gender].[F], [Marital Status].[M]),\n"
            + " ([Gender].[M], [Marital Status].[S])},\n"
            + "[Gender])",
            "[Gender].[M]\n" + "[Gender].[F]");

        // Extract applied to asymmetric set (other side)
        assertAxisReturns(
            "Extract(\n"
            + "{([Gender].[M], [Marital Status].[M]),\n"
            + " ([Gender].[F], [Marital Status].[M]),\n"
            + " ([Gender].[M], [Marital Status].[S])},\n"
            + "[Marital Status])",
            "[Marital Status].[M]\n"
            + "[Marital Status].[S]");

        // Extract more than one hierarchy
        assertAxisReturns(
            "Extract(\n"
            + "[Gender].Children * [Marital Status].Children * [Time].[1997].Children * [Store].[USA].Children,\n"
            + "[Time], [Marital Status])",
            "{[Time].[1997].[Q1], [Marital Status].[M]}\n"
            + "{[Time].[1997].[Q2], [Marital Status].[M]}\n"
            + "{[Time].[1997].[Q3], [Marital Status].[M]}\n"
            + "{[Time].[1997].[Q4], [Marital Status].[M]}\n"
            + "{[Time].[1997].[Q1], [Marital Status].[S]}\n"
            + "{[Time].[1997].[Q2], [Marital Status].[S]}\n"
            + "{[Time].[1997].[Q3], [Marital Status].[S]}\n"
            + "{[Time].[1997].[Q4], [Marital Status].[S]}");

        // Extract duplicate hierarchies fails
        assertAxisThrows(
            "Extract(\n"
            + "{([Gender].[M], [Marital Status].[M]),\n"
            + " ([Gender].[F], [Marital Status].[M]),\n"
            + " ([Gender].[M], [Marital Status].[S])},\n"
            + "[Gender], [Gender])",
            "hierarchy [Gender] is extracted more than once");
    }
    /**
     * Tests that TopPercent() operates succesfully on a
     * axis of crossjoined tuples. previously, this would
     * fail with a ClassCastException in FunUtil.java. bug 1440306
     */
    public void testTopPercentCrossjoin() {
        assertAxisReturns(
            "{TopPercent(Crossjoin([Product].[Product Department].members,\n"
            + "[Time].[1997].children),10,[Measures].[Store Sales])}",
            "{[Product].[Food].[Produce], [Time].[1997].[Q4]}\n"
            + "{[Product].[Food].[Produce], [Time].[1997].[Q1]}\n"
            + "{[Product].[Food].[Produce], [Time].[1997].[Q3]}");
    }
    /**
     * Tests a CrossJoin nested inside another CrossJoin, producing
     * three-member tuples.
     */
    public void testCrossjoinNested() {
        assertAxisReturns(
            " CrossJoin(\n"
            + " CrossJoin(\n"
            + " [Gender].members,\n"
            + " [Marital Status].members),\n"
            + " {[Store], [Store].children})",

            "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
            + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
            + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
            + "{[Gender].[All Gender], [Marital Status].[All Marital Status], [Store].[USA]}\n"
            + "{[Gender].[All Gender], [Marital Status].[M], [Store].[All Stores]}\n"
            + "{[Gender].[All Gender], [Marital Status].[M], [Store].[Canada]}\n"
            + "{[Gender].[All Gender], [Marital Status].[M], [Store].[Mexico]}\n"
            + "{[Gender].[All Gender], [Marital Status].[M], [Store].[USA]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S], [Store].[All Stores]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S], [Store].[Canada]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S], [Store].[Mexico]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S], [Store].[USA]}\n"
            + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
            + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
            + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
            + "{[Gender].[F], [Marital Status].[All Marital Status], [Store].[USA]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Store].[All Stores]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Store].[Canada]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Store].[Mexico]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Store].[USA]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Store].[All Stores]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Store].[Canada]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Store].[Mexico]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Store].[USA]}\n"
            + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[All Stores]}\n"
            + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[Canada]}\n"
            + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[Mexico]}\n"
            + "{[Gender].[M], [Marital Status].[All Marital Status], [Store].[USA]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Store].[All Stores]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Store].[Canada]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Store].[Mexico]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Store].[USA]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[All Stores]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[Canada]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[Mexico]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[USA]}");
    }
public void testCrossjoinSingletonTuples() {
assertAxisReturns(
"CrossJoin({([Gender].[M])}, {([Marital Status].[S])})",
"{[Gender].[M], [Marital Status].[S]}");
}
    /** Tests nested CrossJoin where the inner arguments are singleton tuples. */
    public void testCrossjoinSingletonTuplesNested() {
        assertAxisReturns(
            "CrossJoin({([Gender].[M])}, CrossJoin({([Marital Status].[S])}, [Store].children))",
            "{[Gender].[M], [Marital Status].[S], [Store].[Canada]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[Mexico]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Store].[USA]}");
    }
public void testCrossjoinAsterisk() {
assertAxisReturns(
"{[Gender].[M]} * {[Marital Status].[S]}",
"{[Gender].[M], [Marital Status].[S]}");
}
    /** Tests the '*' operator where one operand is a tuple. */
    public void testCrossjoinAsteriskTuple() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} ON COLUMNS, "
            + "NON EMPTY [Store].[All Stores] "
            + " * ([Product].[All Products], [Gender]) "
            + " * [Customers].[All Customers] ON ROWS "
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[All Stores], [Product].[All Products], [Gender].[All Gender], [Customers].[All Customers]}\n"
            + "Row #0: 266,773\n");
    }
    /** Tests that the '*' operator is associative across three sets. */
    public void testCrossjoinAsteriskAssoc() {
        assertAxisReturns(
            "Order({[Gender].Children} * {[Marital Status].Children} * {[Time].[1997].[Q2].Children},"
            + "[Measures].[Unit Sales])",
            "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[4]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[6]}\n"
            + "{[Gender].[F], [Marital Status].[M], [Time].[1997].[Q2].[5]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[5]}\n"
            + "{[Gender].[F], [Marital Status].[S], [Time].[1997].[Q2].[6]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[4]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[5]}\n"
            + "{[Gender].[M], [Marital Status].[M], [Time].[1997].[Q2].[6]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[6]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[5]}");
    }
    /** Tests a '*' crossjoin of members written inside a set constructor. */
    public void testCrossjoinAsteriskInsideBraces() {
        assertAxisReturns(
            "{[Gender].[M] * [Marital Status].[S] * [Time].[1997].[Q2].Children}",
            "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[4]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[5]}\n"
            + "{[Gender].[M], [Marital Status].[S], [Time].[1997].[Q2].[6]}");
    }
    /**
     * Tests '*' crossjoins on both axes of a full query against the [HR]
     * cube, including DIMENSION PROPERTIES on the rows axis.
     */
    public void testCrossJoinAsteriskQuery() {
        assertQueryReturns(
            "SELECT {[Measures].members * [1997].children} ON COLUMNS,\n"
            + " {[Store].[USA].children * [Position].[All Position].children} DIMENSION PROPERTIES [Store].[Store SQFT] ON ROWS\n"
            + "FROM [HR]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Org Salary], [Time].[1997].[Q1]}\n"
            + "{[Measures].[Org Salary], [Time].[1997].[Q2]}\n"
            + "{[Measures].[Org Salary], [Time].[1997].[Q3]}\n"
            + "{[Measures].[Org Salary], [Time].[1997].[Q4]}\n"
            + "{[Measures].[Count], [Time].[1997].[Q1]}\n"
            + "{[Measures].[Count], [Time].[1997].[Q2]}\n"
            + "{[Measures].[Count], [Time].[1997].[Q3]}\n"
            + "{[Measures].[Count], [Time].[1997].[Q4]}\n"
            + "{[Measures].[Number of Employees], [Time].[1997].[Q1]}\n"
            + "{[Measures].[Number of Employees], [Time].[1997].[Q2]}\n"
            + "{[Measures].[Number of Employees], [Time].[1997].[Q3]}\n"
            + "{[Measures].[Number of Employees], [Time].[1997].[Q4]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[CA], [Position].[Middle Management]}\n"
            + "{[Store].[USA].[CA], [Position].[Senior Management]}\n"
            + "{[Store].[USA].[CA], [Position].[Store Full Time Staf]}\n"
            + "{[Store].[USA].[CA], [Position].[Store Management]}\n"
            + "{[Store].[USA].[CA], [Position].[Store Temp Staff]}\n"
            + "{[Store].[USA].[OR], [Position].[Middle Management]}\n"
            + "{[Store].[USA].[OR], [Position].[Senior Management]}\n"
            + "{[Store].[USA].[OR], [Position].[Store Full Time Staf]}\n"
            + "{[Store].[USA].[OR], [Position].[Store Management]}\n"
            + "{[Store].[USA].[OR], [Position].[Store Temp Staff]}\n"
            + "{[Store].[USA].[WA], [Position].[Middle Management]}\n"
            + "{[Store].[USA].[WA], [Position].[Senior Management]}\n"
            + "{[Store].[USA].[WA], [Position].[Store Full Time Staf]}\n"
            + "{[Store].[USA].[WA], [Position].[Store Management]}\n"
            + "{[Store].[USA].[WA], [Position].[Store Temp Staff]}\n"
            + "Row #0: $275.40\n"
            + "Row #0: $275.40\n"
            + "Row #0: $275.40\n"
            + "Row #0: $275.40\n"
            + "Row #0: 27\n"
            + "Row #0: 27\n"
            + "Row #0: 27\n"
            + "Row #0: 27\n"
            + "Row #0: 9\n"
            + "Row #0: 9\n"
            + "Row #0: 9\n"
            + "Row #0: 9\n"
            + "Row #1: $837.00\n"
            + "Row #1: $837.00\n"
            + "Row #1: $837.00\n"
            + "Row #1: $837.00\n"
            + "Row #1: 24\n"
            + "Row #1: 24\n"
            + "Row #1: 24\n"
            + "Row #1: 24\n"
            + "Row #1: 8\n"
            + "Row #1: 8\n"
            + "Row #1: 8\n"
            + "Row #1: 8\n"
            + "Row #2: $1,728.45\n"
            + "Row #2: $1,727.02\n"
            + "Row #2: $1,727.72\n"
            + "Row #2: $1,726.55\n"
            + "Row #2: 357\n"
            + "Row #2: 357\n"
            + "Row #2: 357\n"
            + "Row #2: 357\n"
            + "Row #2: 119\n"
            + "Row #2: 119\n"
            + "Row #2: 119\n"
            + "Row #2: 119\n"
            + "Row #3: $473.04\n"
            + "Row #3: $473.04\n"
            + "Row #3: $473.04\n"
            + "Row #3: $473.04\n"
            + "Row #3: 51\n"
            + "Row #3: 51\n"
            + "Row #3: 51\n"
            + "Row #3: 51\n"
            + "Row #3: 17\n"
            + "Row #3: 17\n"
            + "Row #3: 17\n"
            + "Row #3: 17\n"
            + "Row #4: $401.35\n"
            + "Row #4: $405.73\n"
            + "Row #4: $400.61\n"
            + "Row #4: $402.31\n"
            + "Row #4: 120\n"
            + "Row #4: 120\n"
            + "Row #4: 120\n"
            + "Row #4: 120\n"
            + "Row #4: 40\n"
            + "Row #4: 40\n"
            + "Row #4: 40\n"
            + "Row #4: 40\n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #5: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #6: \n"
            + "Row #7: $1,343.62\n"
            + "Row #7: $1,342.61\n"
            + "Row #7: $1,342.57\n"
            + "Row #7: $1,343.65\n"
            + "Row #7: 279\n"
            + "Row #7: 279\n"
            + "Row #7: 279\n"
            + "Row #7: 279\n"
            + "Row #7: 93\n"
            + "Row #7: 93\n"
            + "Row #7: 93\n"
            + "Row #7: 93\n"
            + "Row #8: $286.74\n"
            + "Row #8: $286.74\n"
            + "Row #8: $286.74\n"
            + "Row #8: $286.74\n"
            + "Row #8: 30\n"
            + "Row #8: 30\n"
            + "Row #8: 30\n"
            + "Row #8: 30\n"
            + "Row #8: 10\n"
            + "Row #8: 10\n"
            + "Row #8: 10\n"
            + "Row #8: 10\n"
            + "Row #9: $333.20\n"
            + "Row #9: $332.65\n"
            + "Row #9: $331.28\n"
            + "Row #9: $332.43\n"
            + "Row #9: 99\n"
            + "Row #9: 99\n"
            + "Row #9: 99\n"
            + "Row #9: 99\n"
            + "Row #9: 33\n"
            + "Row #9: 33\n"
            + "Row #9: 33\n"
            + "Row #9: 33\n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #10: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #11: \n"
            + "Row #12: $2,768.60\n"
            + "Row #12: $2,769.18\n"
            + "Row #12: $2,766.78\n"
            + "Row #12: $2,769.50\n"
            + "Row #12: 579\n"
            + "Row #12: 579\n"
            + "Row #12: 579\n"
            + "Row #12: 579\n"
            + "Row #12: 193\n"
            + "Row #12: 193\n"
            + "Row #12: 193\n"
            + "Row #12: 193\n"
            + "Row #13: $736.29\n"
            + "Row #13: $736.29\n"
            + "Row #13: $736.29\n"
            + "Row #13: $736.29\n"
            + "Row #13: 81\n"
            + "Row #13: 81\n"
            + "Row #13: 81\n"
            + "Row #13: 81\n"
            + "Row #13: 27\n"
            + "Row #13: 27\n"
            + "Row #13: 27\n"
            + "Row #13: 27\n"
            + "Row #14: $674.70\n"
            + "Row #14: $674.54\n"
            + "Row #14: $676.26\n"
            + "Row #14: $676.48\n"
            + "Row #14: 201\n"
            + "Row #14: 201\n"
            + "Row #14: 201\n"
            + "Row #14: 201\n"
            + "Row #14: 67\n"
            + "Row #14: 67\n"
            + "Row #14: 67\n"
            + "Row #14: 67\n");
    }
    /**
     * Testcase for bug 1889745, "StackOverflowError while resolving
     * crossjoin". The problem occurs when a calculated member that references
     * itself is referenced in a crossjoin.
     */
    public void testCrossjoinResolve() {
        assertQueryReturns(
            "with\n"
            + "member [Measures].[Filtered Unit Sales] as\n"
            + " 'IIf((([Measures].[Unit Sales] > 50000.0)\n"
            + " OR ([Product].CurrentMember.Level.UniqueName <>\n"
            + " \"[Product].[Product Family]\")),\n"
            + " IIf(((Count([Product].CurrentMember.Children) = 0.0)),\n"
            + " [Measures].[Unit Sales],\n"
            + " Sum([Product].CurrentMember.Children,\n"
            + " [Measures].[Filtered Unit Sales])),\n"
            + " NULL)'\n"
            + "select NON EMPTY {crossjoin({[Measures].[Filtered Unit Sales]},\n"
            + "{[Gender].[M], [Gender].[F]})} ON COLUMNS,\n"
            + "NON EMPTY {[Product].[All Products]} ON ROWS\n"
            + "from [Sales]\n"
            + "where [Time].[1997]",
            "Axis #0:\n"
            + "{[Time].[1997]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Filtered Unit Sales], [Gender].[M]}\n"
            + "{[Measures].[Filtered Unit Sales], [Gender].[F]}\n"
            + "Axis #2:\n"
            + "{[Product].[All Products]}\n"
            + "Row #0: 97,126\n"
            + "Row #0: 94,814\n");
    }
    /**
     * Test case for bug 1911832, "Exception converting immutable list to array
     * in JDK 1.5".
     */
    public void testCrossjoinOrder() {
        // Order() must be able to sort the (possibly immutable) list produced
        // by the named set's Crossjoin without throwing on JDK 1.5.
        assertQueryReturns(
            "WITH\n"
            + "\n"
            + "SET [S1] AS 'CROSSJOIN({[Time].[1997]}, {[Gender].[Gender].MEMBERS})'\n"
            + "\n"
            + "SELECT CROSSJOIN(ORDER([S1], [Measures].[Unit Sales], BDESC),\n"
            + "{[Measures].[Unit Sales]}) ON AXIS(0)\n"
            + "FROM [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997], [Gender].[M], [Measures].[Unit Sales]}\n"
            + "{[Time].[1997], [Gender].[F], [Measures].[Unit Sales]}\n"
            + "Row #0: 135,215\n"
            + "Row #0: 131,558\n");
    }
    /**
     * Verifies that combining two members of the same hierarchy ([Time]) in a
     * tuple is an error, whether via CrossJoin, CrossJoin(...).Item(0), or an
     * explicit tuple literal.
     */
    public void testCrossjoinDupHierarchyFails() {
        assertQueryThrows(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " CrossJoin({[Time].[Quarter].[Q1]}, {[Time].[Month].[5]}) ON ROWS\n"
            + "from [Sales]",
            "Tuple contains more than one member of hierarchy '[Time]'.");
        // now with Item, for kicks
        assertQueryThrows(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " CrossJoin({[Time].[Quarter].[Q1]}, {[Time].[Month].[5]}).Item(0) ON ROWS\n"
            + "from [Sales]",
            "Tuple contains more than one member of hierarchy '[Time]'.");
        // same query using explicit tuple
        assertQueryThrows(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " ([Time].[Quarter].[Q1], [Time].[Month].[5]) ON ROWS\n"
            + "from [Sales]",
            "Tuple contains more than one member of hierarchy '[Time]'.");
    }
    /**
     * Tests cases of different hierarchies in the same dimension.
     * (Compare to {@link #testCrossjoinDupHierarchyFails()}). Not an error.
     */
    public void testCrossjoinDupDimensionOk() {
        // [Time] and [Time].[Weekly] are distinct hierarchies of the same
        // dimension, so a tuple may contain a member of each.
        final String expectedResult =
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1], [Time].[Weekly].[1997].[10]}\n"
            + "Row #0: 4,395\n";
        // hierarchyName adapts to old/new naming style of sub-hierarchies
        final String timeWeekly = TestContext.hierarchyName("Time", "Weekly");
        assertQueryReturns(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " CrossJoin({[Time].[Quarter].[Q1]}, {"
            + timeWeekly + ".[1997].[10]}) ON ROWS\n"
            + "from [Sales]",
            expectedResult);
        // now with Item, for kicks
        assertQueryReturns(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " CrossJoin({[Time].[Quarter].[Q1]}, {"
            + timeWeekly + ".[1997].[10]}).Item(0) ON ROWS\n"
            + "from [Sales]",
            expectedResult);
        // same query using explicit tuple
        assertQueryReturns(
            "select [Measures].[Unit Sales] ON COLUMNS,\n"
            + " ([Time].[Quarter].[Q1], "
            + timeWeekly + ".[1997].[10]) ON ROWS\n"
            + "from [Sales]",
            expectedResult);
    }
    // Descendants(<Member>) with no level/flag: member itself plus all
    // descendants, in hierarchical order.
    public void testDescendantsM() {
        assertAxisReturns(
            "Descendants([Time].[1997].[Q1])",
            "[Time].[1997].[Q1]\n"
            + "[Time].[1997].[Q1].[1]\n"
            + "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[3]");
    }
    // Descendants of the Time hierarchy's current member depends only on
    // the [Time] hierarchy.
    public void testDescendantsDepends() {
        getTestContext().assertSetExprDependsOn(
            "Descendants([Time].[Time].CurrentMember)",
            "{[Time]}");
    }
    // Descendants(<Member>, <Level>): members at the given level only.
    // ('months' is a fixture constant declared elsewhere in this class.)
    public void testDescendantsML() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Month])",
            months);
    }
    // SELF flag is the default: same as Descendants(<Member>, <Level>).
    public void testDescendantsMLSelf() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], SELF)",
            quarters);
    }
    // LEAVES flag: only leaf members at or below the given level; levels
    // above the leaf level yield the empty set.
    public void testDescendantsMLLeaves() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Year], LEAVES)",
            "");
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], LEAVES)",
            "");
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Month], LEAVES)",
            months);
        assertAxisReturns(
            "Descendants([Gender], [Gender].[Gender], leaves)",
            "[Gender].[F]\n" + "[Gender].[M]");
    }
    /**
     * Tests Descendants with the LEAVES flag against the ragged [Sales Ragged]
     * cube, where leaf members can occur at different levels.
     */
    public void testDescendantsMLLeavesRagged() {
        // no cities are at leaf level
        final TestContext raggedContext =
            getTestContext().withCube("[Sales Ragged]");
        raggedContext.assertAxisReturns(
            "Descendants([Store].[Israel], [Store].[Store City], leaves)",
            "");
        // all cities are leaves
        raggedContext.assertAxisReturns(
            "Descendants([Geography].[Israel], [Geography].[City], leaves)",
            "[Geography].[Israel].[Israel].[Haifa]\n"
            + "[Geography].[Israel].[Israel].[Tel Aviv]");
        // No state is a leaf (not even Israel, which is both a country and a
        // a state, or Vatican, with is a country/state/city)
        raggedContext.assertAxisReturns(
            "Descendants([Geography], [Geography].[State], leaves)",
            "");
        // The Vatican is a nation with no children (they're all celibate,
        // you know).
        raggedContext.assertAxisReturns(
            "Descendants([Geography], [Geography].[Country], leaves)",
            "[Geography].[Vatican]");
    }
    /**
     * Tests Descendants(<Member>, <Numeric depth>, LEAVES): the depth bounds
     * how far down to search, but leaves found within that bound are returned
     * regardless of the exact depth value.
     */
    public void testDescendantsMNLeaves() {
        // leaves at depth 0 returns the member itself
        assertAxisReturns(
            "Descendants([Time].[1997].[Q2].[4], 0, Leaves)",
            "[Time].[1997].[Q2].[4]");
        // leaves at depth > 0 returns the member itself
        assertAxisReturns(
            "Descendants([Time].[1997].[Q2].[4], 100, Leaves)",
            "[Time].[1997].[Q2].[4]");
        // leaves at depth < 0 returns all descendants
        assertAxisReturns(
            "Descendants([Time].[1997].[Q2], -1, Leaves)",
            "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]");
        // leaves at depth 0 returns the member itself
        assertAxisReturns(
            "Descendants([Time].[1997].[Q2], 0, Leaves)",
            "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]");
        assertAxisReturns(
            "Descendants([Time].[1997].[Q2], 3, Leaves)",
            "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]");
    }
    // --- Descendants DESC_FLAG variants against level or depth arguments.
    // Fixture strings (year1997, quarters, months, hierarchized1997) are
    // declared elsewhere in this class.
    // SELF_AND_BEFORE: target level plus all ancestors down from the member.
    public void testDescendantsMLSelfBefore() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], SELF_AND_BEFORE)",
            year1997 + "\n" + quarters);
    }
    // SELF_BEFORE_AFTER: every descendant, hierarchized.
    public void testDescendantsMLSelfBeforeAfter() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], SELF_BEFORE_AFTER)",
            hierarchized1997);
    }
    // BEFORE: only members above the target level.
    public void testDescendantsMLBefore() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], BEFORE)", year1997);
    }
    // BEFORE_AND_AFTER: everything except the target level itself.
    public void testDescendantsMLBeforeAfter() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], BEFORE_AND_AFTER)",
            year1997 + "\n" + months);
    }
    // AFTER: only members below the target level.
    public void testDescendantsMLAfter() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Quarter], AFTER)", months);
    }
    // AFTER below the lowest level yields the empty set.
    public void testDescendantsMLAfterEnd() {
        assertAxisReturns(
            "Descendants([Time].[1997], [Time].[Month], AFTER)", "");
    }
    // Numeric depth 0 = the member itself.
    public void testDescendantsM0() {
        assertAxisReturns(
            "Descendants([Time].[1997], 0)", year1997);
    }
    // Numeric depth 2 below Year = Month level.
    public void testDescendantsM2() {
        assertAxisReturns(
            "Descendants([Time].[1997], 2)", months);
    }
    public void testDescendantsM2Self() {
        assertAxisReturns(
            "Descendants([Time].[1997], 2, Self)", months);
    }
    public void testDescendantsM2Leaves() {
        assertAxisReturns(
            "Descendants([Time].[1997], 2, Leaves)", months);
    }
    // LEAVES with a depth beyond the hierarchy still finds the leaf level.
    public void testDescendantsMFarLeaves() {
        assertAxisReturns(
            "Descendants([Time].[1997], 10000, Leaves)", months);
    }
    // Depth argument may be empty when the flag is LEAVES.
    public void testDescendantsMEmptyLeaves() {
        assertAxisReturns(
            "Descendants([Time].[1997], , Leaves)",
            months);
    }
    // ...but an empty second argument with no flag is a signature error,
    public void testDescendantsMEmptyLeavesFail() {
        assertAxisThrows(
            "Descendants([Time].[1997],)",
            "No function matches signature 'Descendants(<Member>, <Empty>)");
    }
    // ...and an empty depth with any flag other than LEAVES is rejected.
    public void testDescendantsMEmptyLeavesFail2() {
        assertAxisThrows(
            "Descendants([Time].[1997], , AFTER)",
            "depth must be specified unless DESC_FLAG is LEAVES");
    }
    // SELF at a depth beyond the hierarchy yields the empty set.
    public void testDescendantsMFarSelf() {
        assertAxisReturns(
            "Descendants([Time].[1997], 10000, Self)",
            "");
    }
    // Depth 1 with BEFORE_AND_AFTER: the year plus months (not quarters).
    public void testDescendantsMNY() {
        assertAxisReturns(
            "Descendants([Time].[1997], 1, BEFORE_AND_AFTER)",
            year1997 + "\n" + months);
    }
    // Descendants works on a non-default hierarchy ([Time.Weekly]) of a
    // dimension, addressed with the old dotted naming style.
    public void testDescendants2ndHier() {
        assertAxisReturns(
            "Descendants([Time.Weekly].[1997].[10], [Time.Weekly].[Day])",
            "[Time].[Weekly].[1997].[10].[1]\n"
            + "[Time].[Weekly].[1997].[10].[23]\n"
            + "[Time].[Weekly].[1997].[10].[24]\n"
            + "[Time].[Weekly].[1997].[10].[25]\n"
            + "[Time].[Weekly].[1997].[10].[26]\n"
            + "[Time].[Weekly].[1997].[10].[27]\n"
            + "[Time].[Weekly].[1997].[10].[28]");
    }
    // Descendants by numeric depth on a parent-child hierarchy ([Employees]
    // in the HR cube): depth 2 = Sheri Nowmer's direct reports.
    public void testDescendantsParentChild() {
        getTestContext().withCube("HR").assertAxisReturns(
            "Descendants([Employees], 2)",
            "[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence]\n"
            + "[Employees].[Sheri Nowmer].[Maya Gutierrez]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra]\n"
            + "[Employees].[Sheri Nowmer].[Rebecca Kanagaki]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz]\n"
            + "[Employees].[Sheri Nowmer].[Donna Arnold]");
    }
    // BEFORE flag on a parent-child hierarchy: the ancestors above depth 2.
    public void testDescendantsParentChildBefore() {
        getTestContext().withCube("HR").assertAxisReturns(
            "Descendants([Employees], 2, BEFORE)",
            "[Employees].[All Employees]\n"
            + "[Employees].[Sheri Nowmer]");
    }
    /**
     * Tests Descendants with the LEAVES flag on a parent-child hierarchy
     * ([Employees] in the HR cube), restricted first by level and then by
     * numeric depth, including negative depth (which acts like +infinity).
     */
    public void testDescendantsParentChildLeaves() {
        final TestContext testContext = getTestContext().withCube("HR");
        if (Bug.avoidSlowTestOnLucidDB(testContext.getDialect())) {
            return;
        }
        // leaves, restricted by level
        testContext.assertAxisReturns(
            "Descendants([Employees].[All Employees].[Sheri Nowmer].[Michael Spence], [Employees].[Employee Id], LEAVES)",
            "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[John Brooks]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Todd Logan]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Joshua Several]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[James Thomas]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Robert Vessa]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Bronson Jacobs]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Rebecca Barley]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Emilio Alvaro]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Becky Waters]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[A. Joyce Jarvis]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Ruby Sue Styles]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Lisa Roy]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Ingrid Burkhardt]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Todd Whitney]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Barbara Wisnewski]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Karren Burkhardt]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[John Long]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Edwin Olenzek]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Jessie Valerio]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Robert Ahlering]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Megan Burke]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Mary Sandidge].[Karel Bates]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[James Tran]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Shelley Crow]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Anne Sims]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Clarence Tatman]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Jan Nelsen]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Jeanie Glenn]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Peggy Smith]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Tish Duff]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Anita Lucero]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stephen Burton]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Amy Consentino]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stacie Mcanich]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Mary Browning]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Alexandra Wellington]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Cory Bacugalupi]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Stacy Rizzi]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Mike White]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Marty Simpson]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Robert Jones]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Raul Casts]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Bridget Browqett]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Monk Skonnard].[Kay Kartz]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Jeanette Cole]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Phyllis Huntsman]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Hannah Arakawa]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Wathalee Steuber]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Pamela Cox]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Helen Lutes]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Linda Ecoffey]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Katherine Swint]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Dianne Slattengren]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Ronald Heymsfield]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Steven Whitehead]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[William Sotelo]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Beth Stanley]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Jill Markwood]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Mildred Valentine]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Suzann Reams]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Audrey Wold]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Susan French]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Trish Pederson]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Eric Renn]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Elizabeth Catalano]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Christopher Beck].[Eric Coleman]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Catherine Abel]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Emilo Miller]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Daniel Wolter].[Michael John Troyer].[Hazel Walker]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Linda Blasingame]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Jackie Blackwell]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[John Ortiz]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Stacey Tearpak]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Fannye Weber]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Diane Kabbes]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Brenda Heaney]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Sara Pettengill].[Judith Karavites]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Jauna Elson]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Nancy Hirota]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Marie Moya]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Nicky Chesnut]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Karen Hall]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Greg Narberes]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Anna Townsend]\n"
            + "[Employees].[Sheri Nowmer].[Michael Spence].[Dianne Collins].[Lawrence Hurkett].[Carol Ann Rockne]");
        // leaves, restricted by depth
        testContext.assertAxisReturns(
            "Descendants([Employees], 1, LEAVES)", "");
        testContext.assertAxisReturns(
            "Descendants([Employees], 2, LEAVES)",
            "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]\n"
            + "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Juanita Sharp]\n"
            + "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Sandra Brunner]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Ernest Staton]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Rose Sims]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Lauretta De Carlo]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Mary Williams]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Terri Burke]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Audrey Osborn]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Brian Binai]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Concepcion Lozada]\n"
            + "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]\n"
            + "[Employees].[Sheri Nowmer].[Donna Arnold].[Doris Carter]");
        testContext.assertAxisReturns(
            "Descendants([Employees], 3, LEAVES)",
            "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]\n"
            + "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Juanita Sharp]\n"
            + "[Employees].[Sheri Nowmer].[Rebecca Kanagaki].[Sandra Brunner]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Ernest Staton]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Rose Sims]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Lauretta De Carlo]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Mary Williams]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Terri Burke]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Audrey Osborn]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Brian Binai]\n"
            + "[Employees].[Sheri Nowmer].[Darren Stanz].[Concepcion Lozada]\n"
            + "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]\n"
            + "[Employees].[Sheri Nowmer].[Donna Arnold].[Doris Carter]");
        // note that depth is RELATIVE to the starting member
        testContext.assertAxisReturns(
            "Descendants([Employees].[Sheri Nowmer].[Roberta Damstra], 1, LEAVES)",
            "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jennifer Cooper]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Peggy Petty]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Jessica Olguin]\n"
            + "[Employees].[Sheri Nowmer].[Roberta Damstra].[Phyllis Burchett]");
        // Howard Bechard is a leaf member -- appears even at depth 0
        testContext.assertAxisReturns(
            "Descendants([Employees].[All Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard], 0, LEAVES)",
            "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]");
        testContext.assertAxisReturns(
            "Descendants([Employees].[All Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard], 1, LEAVES)",
            "[Employees].[Sheri Nowmer].[Donna Arnold].[Howard Bechard]");
        testContext.assertExprReturns(
            "Count(Descendants([Employees], 2, LEAVES))", "16");
        testContext.assertExprReturns(
            "Count(Descendants([Employees], 3, LEAVES))", "16");
        testContext.assertExprReturns(
            "Count(Descendants([Employees], 4, LEAVES))", "63");
        testContext.assertExprReturns(
            "Count(Descendants([Employees], 999, LEAVES))", "1,044");
        // Negative depth acts like +infinity (per MSAS). Run the test several
        // times because we had a non-deterministic bug here.
        for (int i = 0; i < 100; ++i) {
            testContext.assertExprReturns(
                "Count(Descendants([Employees], -1, LEAVES))", "1,044");
        }
    }
    // SELF_BEFORE_AFTER with a numeric depth hierarchizes the whole subtree.
    public void testDescendantsSBA() {
        assertAxisReturns(
            "Descendants([Time].[1997], 1, SELF_BEFORE_AFTER)",
            hierarchized1997);
    }
    // Descendants accepts a set as its first argument; results are the
    // concatenated descendants of each member, in set order.
    public void testDescendantsSet() {
        assertAxisReturns(
            "Descendants({[Time].[1997].[Q4], [Time].[1997].[Q2]}, 1)",
            "[Time].[1997].[Q4].[10]\n"
            + "[Time].[1997].[Q4].[11]\n"
            + "[Time].[1997].[Q4].[12]\n"
            + "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]");
        assertAxisReturns(
            "Descendants({[Time].[1997]}, [Time].[Month], LEAVES)",
            months);
    }
    // An untyped empty set literal is an error; a typed-but-empty set (via
    // Filter) yields an empty result.
    public void testDescendantsSetEmpty() {
        assertAxisThrows(
            "Descendants({}, 1)",
            "Cannot deduce type of set");
        assertAxisReturns(
            "Descendants(Filter({[Time].[Time].Members}, 1=0), 1)",
            "");
    }
    /**
     * Tests the range (colon) operator: the inclusive span of members at a
     * level, ignoring parent boundaries.
     */
    public void testRange() {
        assertAxisReturns(
            "[Time].[1997].[Q1].[2] : [Time].[1997].[Q2].[5]",
            "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[3]\n"
            + "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]"); // not parents
        // testcase for bug XXXXX: braces required
        // (NOTE(review): bug number was never filled in; the point is that a
        // range expression works as a named set used on an axis.)
        assertQueryReturns(
            "with set [Set1] as '[Product].[Drink]:[Product].[Food]' \n"
            + "\n"
            + "select [Set1] on columns, {[Measures].defaultMember} on rows \n"
            + "\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Food]}\n"
            + "Axis #2:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Row #0: 24,597\n"
            + "Row #0: 191,940\n");
    }
    /**
     * tests that a null passed in returns an empty set in range function
     */
    public void testNullRange() {
        assertAxisReturns(
            "[Time].[1997].[Q1].[2] : NULL", //[Time].[1997].[Q2].[5]
            ""); // Empty Set
    }
    /**
     * tests that an exception is thrown if both parameters in a range function
     * are null.
     */
    public void testTwoNullRange() {
        // NULL : NULL cannot even be parsed -- the error comes from the
        // parser, not the range evaluator.
        assertAxisThrows(
            "NULL : NULL",
            "Mondrian Error:Failed to parse query 'select {NULL : NULL} on columns from Sales'");
    }
    /**
     * Large dimensions use a different member reader, therefore need to
     * be tested separately.
     */
    public void testRangeLarge() {
        // Range over the (large) [Customers] dimension spans from CA cities,
        // through all OR cities, into WA -- crossing state boundaries.
        assertAxisReturns(
            "[Customers].[USA].[CA].[San Francisco] : [Customers].[USA].[WA].[Bellingham]",
            "[Customers].[USA].[CA].[San Francisco]\n"
            + "[Customers].[USA].[CA].[San Gabriel]\n"
            + "[Customers].[USA].[CA].[San Jose]\n"
            + "[Customers].[USA].[CA].[Santa Cruz]\n"
            + "[Customers].[USA].[CA].[Santa Monica]\n"
            + "[Customers].[USA].[CA].[Spring Valley]\n"
            + "[Customers].[USA].[CA].[Torrance]\n"
            + "[Customers].[USA].[CA].[West Covina]\n"
            + "[Customers].[USA].[CA].[Woodland Hills]\n"
            + "[Customers].[USA].[OR].[Albany]\n"
            + "[Customers].[USA].[OR].[Beaverton]\n"
            + "[Customers].[USA].[OR].[Corvallis]\n"
            + "[Customers].[USA].[OR].[Lake Oswego]\n"
            + "[Customers].[USA].[OR].[Lebanon]\n"
            + "[Customers].[USA].[OR].[Milwaukie]\n"
            + "[Customers].[USA].[OR].[Oregon City]\n"
            + "[Customers].[USA].[OR].[Portland]\n"
            + "[Customers].[USA].[OR].[Salem]\n"
            + "[Customers].[USA].[OR].[W. Linn]\n"
            + "[Customers].[USA].[OR].[Woodburn]\n"
            + "[Customers].[USA].[WA].[Anacortes]\n"
            + "[Customers].[USA].[WA].[Ballard]\n"
            + "[Customers].[USA].[WA].[Bellingham]");
    }
    // --- Range operator edge cases, each also exercised against the large
    // [Customers] dimension (which uses a different member reader).
    // start == end yields exactly that one member.
    public void testRangeStartEqualsEnd() {
        assertAxisReturns(
            "[Time].[1997].[Q3].[7] : [Time].[1997].[Q3].[7]",
            "[Time].[1997].[Q3].[7]");
    }
    public void testRangeStartEqualsEndLarge() {
        assertAxisReturns(
            "[Customers].[USA].[CA] : [Customers].[USA].[CA]",
            "[Customers].[USA].[CA]");
    }
    // Reversed endpoints produce the same set as the forward range.
    public void testRangeEndBeforeStart() {
        assertAxisReturns(
            "[Time].[1997].[Q3].[7] : [Time].[1997].[Q2].[5]",
            "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]"); // same as if reversed
    }
    public void testRangeEndBeforeStartLarge() {
        assertAxisReturns(
            "[Customers].[USA].[WA] : [Customers].[USA].[CA]",
            "[Customers].[USA].[CA]\n"
            + "[Customers].[USA].[OR]\n"
            + "[Customers].[USA].[WA]");
    }
    // Endpoints must be at the same level.
    public void testRangeBetweenDifferentLevelsIsError() {
        assertAxisThrows(
            "[Time].[1997].[Q2] : [Time].[1997].[Q2].[5]",
            "Members must belong to the same level");
    }
    // A bare hierarchy name resolves to its default (All) member.
    public void testRangeBoundedByAll() {
        assertAxisReturns(
            "[Gender] : [Gender]",
            "[Gender].[All Gender]");
    }
    public void testRangeBoundedByAllLarge() {
        assertAxisReturns(
            "[Customers].DefaultMember : [Customers]",
            "[Customers].[All Customers]");
    }
    // A null endpoint (e.g. NextMember past the end) yields the empty set.
    public void testRangeBoundedByNull() {
        assertAxisReturns(
            "[Gender].[F] : [Gender].[M].NextMember",
            "");
    }
    public void testRangeBoundedByNullLarge() {
        assertAxisReturns(
            "[Customers].PrevMember : [Customers].[USA].[OR]",
            "");
    }
    // A level is not a valid element of a set literal.
    public void testSetContainingLevelFails() {
        assertAxisThrows(
            "[Store].[Store City]",
            "No function matches signature '{<Level>}'");
    }
    /**
     * Testcase for bug 715177: Except/TopCount over an immutable member list
     * inside a calculated member must not fail.
     */
    public void testBug715177() {
        assertQueryReturns(
            "WITH MEMBER [Product].[Non-Consumable].[Other] AS\n"
            + " 'Sum(Except( [Product].[Product Department].Members,\n"
            + " TopCount([Product].[Product Department].Members, 3)),\n"
            + " Measures.[Unit Sales])'\n"
            + "SELECT\n"
            + " { [Measures].[Unit Sales] } ON COLUMNS,\n"
            + " { TopCount([Product].[Product Department].Members,3),\n"
            + " [Product].[Non-Consumable].[Other] } ON ROWS\n"
            + "FROM [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Non-Consumable].[Other]}\n"
            + "Row #0: 6,838\n"
            + "Row #1: 13,573\n"
            + "Row #2: 4,186\n"
            + "Row #3: 242,176\n");
    }
    public void testBug714707() {
        // Same issue as bug 715177 -- "children" returns immutable
        // list, which set operator must make mutable.
        assertAxisReturns(
            "{[Store].[USA].[CA].children, [Store].[USA]}",
            "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[Los Angeles]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA]");
    }
    // Variant of bug 715177: Order over TopCount of an immutable child list.
    public void testBug715177c() {
        assertAxisReturns(
            "Order(TopCount({[Store].[USA].[CA].children},"
            + " [Measures].[Unit Sales], 2), [Measures].[Unit Sales])",
            "[Store].[USA].[CA].[Alameda]\n"
            + "[Store].[USA].[CA].[San Francisco]\n"
            + "[Store].[USA].[CA].[Beverly Hills]\n"
            + "[Store].[USA].[CA].[San Diego]\n"
            + "[Store].[USA].[CA].[Los Angeles]");
    }
    // Format() with a literal format string.
    public void testFormatFixed() {
        assertExprReturns(
            "Format(12.2, \"#,##0.00\")",
            "12.20");
    }
    // Format() where the format string is itself an expression.
    public void testFormatVariable() {
        assertExprReturns(
            "Format(1234.5, \"#,#\" || \"#0.00\")",
            "1,234.50");
    }
    // Format() applied to a member formats the member's current cell value.
    public void testFormatMember() {
        assertExprReturns(
            "Format([Store].[USA].[CA], \"#,#\" || \"#0.00\")",
            "74,748.00");
    }
    // --- IIf() across the various operand types it is overloaded for.
    // Basic string-valued IIf on a tuple condition.
    public void testIIf() {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, \"Yes\",\"No\")",
            "Yes");
    }
    // Numeric IIf where one branch is null: null renders as "".
    public void testIIfWithNullAndNumber() {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, null,20)",
            "");
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, 20,null)",
            "20");
    }
    // String IIf where one branch is null: null renders as "".
    public void testIIfWithStringAndNull()
    {
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, null,\"foo\")",
            "");
        assertExprReturns(
            "IIf(([Measures].[Unit Sales],[Product].[Drink].[Alcoholic Beverages].[Beer and Wine]) > 100, \"foo\",null)",
            "foo");
    }
    // IsEmpty(null) is true, in both string and numeric IIf contexts.
    public void testIsEmptyWithNull()
    {
        assertExprReturns(
            "iif (isempty(null), \"is empty\", \"not is empty\")",
            "is empty");
        assertExprReturns("iif (isempty(null), 1, 2)", "1");
    }
    // IIf returning a member.
    public void testIIfMember() {
        assertAxisReturns(
            "IIf(1 > 2,[Store].[USA],[Store].[Canada].[BC])",
            "[Store].[Canada].[BC]");
    }
    // IIf returning a level.
    public void testIIfLevel() {
        assertExprReturns(
            "IIf(1 > 2, [Store].[Store Country],[Store].[Store City]).Name",
            "Store City");
    }
    // IIf returning a hierarchy, including implicit hierarchy->dimension
    // conversion to unify the branch types.
    public void testIIfHierarchy() {
        assertExprReturns(
            "IIf(1 > 2, [Time], [Store]).Name",
            "Store");
        // Call Iif(<Logical>, <Dimension>, <Hierarchy>). Argument #3, the
        // hierarchy [Time.Weekly] is implicitly converted to
        // the dimension [Time] to match argument #2 which is a dimension.
        assertExprReturns(
            "IIf(1 > 2, [Time], [Time.Weekly]).Name",
            "Time");
    }
    // IIf returning a dimension.
    public void testIIfDimension() {
        assertExprReturns(
            "IIf(1 > 2, [Store], [Time]).Name",
            "Time");
    }
    // IIf returning a set.
    public void testIIfSet() {
        assertAxisReturns(
            "IIf(1 > 2, {[Store].[USA], [Store].[USA].[CA]}, {[Store].[Mexico], [Store].[USA].[OR]})",
            "[Store].[Mexico]\n"
            + "[Store].[USA].[OR]");
    }
    // --- Caption / Name / UniqueName properties on dimension, hierarchy,
    // level and member objects.
    public void testDimensionCaption() {
        assertExprReturns("[Time].[1997].Dimension.Caption", "Time");
    }
    public void testHierarchyCaption() {
        assertExprReturns("[Time].[1997].Hierarchy.Caption", "Time");
    }
    public void testLevelCaption() {
        assertExprReturns("[Time].[1997].Level.Caption", "Year");
    }
    public void testMemberCaption() {
        assertExprReturns("[Time].[1997].Caption", "1997");
    }
    public void testDimensionName() {
        assertExprReturns("[Time].[1997].Dimension.Name", "Time");
    }
    public void testHierarchyName() {
        assertExprReturns("[Time].[1997].Hierarchy.Name", "Time");
    }
    public void testLevelName() {
        assertExprReturns("[Time].[1997].Level.Name", "Year");
    }
    public void testMemberName() {
        assertExprReturns("[Time].[1997].Name", "1997");
        // dimension name
        assertExprReturns("[Store].Name", "Store");
        // member name
        assertExprReturns("[Store].DefaultMember.Name", "All Stores");
        if (isDefaultNullMemberRepresentation()) {
            // name of null member
            assertExprReturns("[Store].Parent.Name", "#null");
        }
    }
    public void testDimensionUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Dimension.UniqueName",
            "[Gender]");
    }
    public void testHierarchyUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Hierarchy.UniqueName",
            "[Gender]");
    }
    public void testLevelUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.Level.UniqueName",
            "[Gender].[(All)]");
    }
    public void testMemberUniqueName() {
        assertExprReturns(
            "[Gender].DefaultMember.UniqueName",
            "[Gender].[All Gender]");
    }
    // UniqueName of the null member, when the default "#null" representation
    // is in effect.
    public void testMemberUniqueNameOfNull() {
        if (isDefaultNullMemberRepresentation()) {
            assertExprReturns(
                "[Measures].[Unit Sales].FirstChild.UniqueName",
                "[Measures].[#null]"); // MSOLAP gives "" here
        }
    }
/** Tests which hierarchies a CoalesceEmpty expression depends on. */
public void testCoalesceEmptyDepends() {
    // Member arguments leave the expression dependent on all hierarchies.
    getTestContext().assertExprDependsOn(
        "coalesceempty([Time].[1997], [Gender].[M])",
        TestContext.allHiers());
    // Tuple arguments pin [Measures] and [Time], removing those dependencies.
    String s1 = TestContext.allHiersExcept("[Measures]", "[Time]");
    getTestContext().assertExprDependsOn(
        "coalesceempty(([Measures].[Unit Sales], [Time].[1997]),"
        + " ([Measures].[Store Sales], [Time].[1997].[Q2]))",
        s1);
}
/**
 * Tests CoalesceEmpty with two and many tuple arguments, against stores
 * that are wholly null ([Mexico].[DF]) and partially populated ([USA].[WA]).
 */
public void testCoalesceEmpty() {
    // [DF] is all null and [WA] has numbers for 1997 but not for 1998.
    Result result = executeQuery(
        "with\n"
        + " member Measures.[Coal1] as 'coalesceempty(([Time].[1997], Measures.[Store Sales]), ([Time].[1998], Measures.[Store Sales]))'\n"
        + " member Measures.[Coal2] as 'coalesceempty(([Time].[1997], Measures.[Unit Sales]), ([Time].[1998], Measures.[Unit Sales]))'\n"
        + "select \n"
        + " {Measures.[Coal1], Measures.[Coal2]} on columns,\n"
        + " {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
        + "from \n"
        + " [Sales]");
    checkDataResults(
        new Double[][]{
            new Double[]{null, null},
            new Double[]{new Double(263793.22), new Double(124366)}
        },
        result,
        0.001);
    // Two-argument form: falls back to the plain measure for [WA].
    result = executeQuery(
        "with\n"
        + " member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
        + " member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " Measures.[Sales Per Customer])'\n"
        + "select \n"
        + " {Measures.[Sales Per Customer], Measures.[Coal]} on columns,\n"
        + " {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
        + "from \n"
        + " [Sales]\n"
        + "where\n"
        + " ([Time].[1997].[Q2])");
    checkDataResults(
        new Double[][]{
            new Double[]{null, null},
            new Double[]{new Double(8.963), new Double(8.963)}
        },
        result,
        0.001);
    // Many-argument form: repeated empty arguments are all skipped before
    // reaching the non-empty final argument.
    result = executeQuery(
        "with\n"
        + " member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
        + " member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " ([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " Measures.[Sales Per Customer])'\n"
        + "select \n"
        + " {Measures.[Sales Per Customer], Measures.[Coal]} on columns,\n"
        + " {[Store].[All Stores].[Mexico].[DF], [Store].[All Stores].[USA].[WA]} on rows\n"
        + "from \n"
        + " [Sales]\n"
        + "where\n"
        + " ([Time].[1997].[Q2])");
    checkDataResults(
        new Double[][]{
            new Double[]{null, null},
            new Double[]{new Double(8.963), new Double(8.963)}
        },
        result,
        0.001);
}
/**
 * Tests that CoalesceEmpty evaluates its fallback in the correct member
 * context (regression test for a broken-context bug).
 */
public void testBrokenContextBug() {
    Result result = executeQuery(
        "with\n"
        + " member Measures.[Sales Per Customer] as 'Measures.[Sales Count] / Measures.[Customer Count]'\n"
        + " member Measures.[Coal] as 'coalesceempty(([Measures].[Sales Per Customer], [Store].[All Stores].[Mexico].[DF]),\n"
        + " Measures.[Sales Per Customer])'\n"
        + "select \n"
        + " {Measures.[Coal]} on columns,\n"
        + " {[Store].[All Stores].[USA].[WA]} on rows\n"
        + "from \n"
        + " [Sales]\n"
        + "where\n"
        + " ([Time].[1997].[Q2])");
    checkDataResults(new Double[][]{{new Double(8.963)}}, result, 0.001);
}
/**
 * Tests the function <code><Set>.Item(<Integer>)</code>.
 */
public void testSetItemInt() {
    // Single-element set, index 0.
    assertAxisReturns(
        "{[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(0)",
        "[Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]");
    // Literal index into a multi-element set.
    assertAxisReturns(
        "{[Customers].[All Customers].[USA],"
        + "[Customers].[All Customers].[USA].[WA],"
        + "[Customers].[All Customers].[USA].[CA],"
        + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(2)",
        "[Customers].[USA].[CA]");
    // Index given as a computed numeric expression (100 / 50 - 1 == 1).
    assertAxisReturns(
        "{[Customers].[All Customers].[USA],"
        + "[Customers].[All Customers].[USA].[WA],"
        + "[Customers].[All Customers].[USA].[CA],"
        + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(100 / 50 - 1)",
        "[Customers].[USA].[WA]");
    // Item on a set of tuples returns a tuple.
    assertAxisReturns(
        "{([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA]),"
        + "([Time].[1997].[Q1].[2], [Customers].[All Customers].[USA].[WA]),"
        + "([Time].[1997].[Q1].[3], [Customers].[All Customers].[USA].[CA]),"
        + "([Time].[1997].[Q2].[4], [Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian])}"
        + ".Item(100 / 50 - 1)",
        "{[Time].[1997].[Q1].[2], [Customers].[USA].[WA]}");
    // given index out of bounds, item returns null
    assertAxisReturns(
        "{[Customers].[All Customers].[USA],"
        + "[Customers].[All Customers].[USA].[WA],"
        + "[Customers].[All Customers].[USA].[CA],"
        + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(-1)",
        "");
    // given index out of bounds, item returns null
    assertAxisReturns(
        "{[Customers].[All Customers].[USA],"
        + "[Customers].[All Customers].[USA].[WA],"
        + "[Customers].[All Customers].[USA].[CA],"
        + "[Customers].[All Customers].[USA].[OR].[Lebanon].[Mary Frances Christian]}.Item(4)",
        "");
}
/**
 * Tests the function <code><Set>.Item(<String> [,...])</code>.
 */
public void testSetItemString() {
    assertAxisReturns(
        "{[Gender].[M], [Gender].[F]}.Item(\"M\")",
        "[Gender].[M]");
    // Multiple string keys select within a set of tuples.
    assertAxisReturns(
        "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"S\")",
        "{[Gender].[M], [Marital Status].[S]}");
    // MSAS fails with "duplicate dimensions across (independent) axes".
    // (That's a bug in MSAS.)
    assertAxisReturns(
        "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"M\")",
        "{[Gender].[M], [Marital Status].[M]}");
    // None found.
    assertAxisReturns(
        "{[Gender].[M], [Gender].[F]}.Item(\"X\")", "");
    assertAxisReturns(
        "{CrossJoin([Gender].Members, [Marital Status].Members)}.Item(\"M\", \"F\")",
        "");
    assertAxisReturns(
        "CrossJoin([Gender].Members, [Marital Status].Members).Item(\"S\", \"M\")",
        "");
    // Arity of the string arguments must match the set's tuple arity.
    assertAxisThrows(
        "CrossJoin([Gender].Members, [Marital Status].Members).Item(\"M\")",
        "Argument count does not match set's cardinality 2");
}
/** Tests the tuple constructor and how the compiler plans its evaluation. */
public void testTuple() {
    assertExprReturns(
        "([Gender].[M], "
        + "[Time].[Time].Children.Item(2), "
        + "[Measures].[Unit Sales])",
        "33,249");
    // Calc calls MemberValue with 3 args -- more efficient than
    // constructing a tuple.
    assertExprCompilesTo(
        "([Gender].[M], [Time].[Time].Children.Item(2), [Measures].[Unit Sales])",
        "MemberArrayValueCalc(name=MemberArrayValueCalc, class=class mondrian.calc.impl.MemberArrayValueCalc, type=SCALAR, resultStyle=VALUE)\n"
        + " Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n"
        + " Item(name=Item, class=class mondrian.olap.fun.SetItemFunDef$5, type=MemberType<hierarchy=[Time]>, resultStyle=VALUE)\n"
        + " Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Time]>>, resultStyle=LIST)\n"
        + " CurrentMemberFixed(hierarchy=[Time], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Time]>, resultStyle=VALUE)\n"
        + " Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=DecimalType(0), resultStyle=VALUE_NOT_NULL, value=2)\n"
        + " Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n");
}
/**
 * Tests whether the tuple operator can be applied to arguments of various
 * types. See bug 1491699
 * "ClassCastException in mondrian.calc.impl.GenericCalc.evaluat".
 */
public void testTupleArgTypes() {
    // can coerce dimensions (if they have a unique hierarchy) and
    // hierarchies to members
    assertExprReturns(
        "([Gender], [Time].[Time])",
        "266,773");
    // can coerce hierarchy to member
    assertExprReturns(
        "([Gender].[M], " + TimeWeekly + ")", "135,215");
    // cannot coerce level to member
    assertAxisThrows(
        "{([Gender].[M], [Store].[Store City])}",
        "No function matches signature '(<Member>, <Level>)'");
    // coerce args (hierarchy, member, member, dimension)
    assertAxisReturns(
        "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Promotion Media])}",
        "{[Time].[Weekly].[All Weeklys], [Measures].[Store Sales], [Marital Status].[M], [Promotion Media].[All Media]}");
    // usage of different hierarchies in the [Time] dimension
    assertAxisReturns(
        "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time].[Time])}",
        "{[Time].[Weekly].[All Weeklys], [Measures].[Store Sales], [Marital Status].[M], [Time].[1997]}");
    // two usages of the [Time].[Weekly] hierarchy
    if (MondrianProperties.instance().SsasCompatibleNaming.get()) {
        assertAxisThrows(
            "{([Time].[Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time].[Weekly])}",
            "Tuple contains more than one member of hierarchy '[Time].[Weekly]'.");
    } else {
        assertAxisThrows(
            "{([Time.Weekly], [Measures].[Store Sales], [Marital Status].[M], [Time.Weekly])}",
            "Tuple contains more than one member of hierarchy '[Time.Weekly]'.");
    }
    // cannot coerce integer to member
    assertAxisThrows(
        "{([Gender].[M], 123)}",
        "No function matches signature '(<Member>, <Numeric Expression>)'");
}
/** Tests <code><Tuple>.Item(<Integer>)</code>, including out-of-bounds indexes. */
public void testTupleItem() {
    assertAxisReturns(
        "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(2)",
        "[Gender].[M]");
    assertAxisReturns(
        "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(1)",
        "[Customers].[USA].[OR]");
    assertAxisReturns(
        "{[Time].[1997].[Q1].[1]}.item(0)",
        "[Time].[1997].[Q1].[1]");
    // Item can be chained: a one-element set yields a member, and Item(0)
    // on a member tuple yields the member itself.
    assertAxisReturns(
        "{[Time].[1997].[Q1].[1]}.Item(0).Item(0)",
        "[Time].[1997].[Q1].[1]");
    // given out of bounds index, item returns null
    assertAxisReturns(
        "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(-1)",
        "");
    // given out of bounds index, item returns null
    assertAxisReturns(
        "([Time].[1997].[Q1].[1], [Customers].[All Customers].[USA].[OR], [Gender].[All Gender].[M]).item(500)",
        "");
    // empty set
    assertExprReturns(
        "Filter([Gender].members, 1 = 0).Item(0)",
        "");
    // empty set of unknown type
    assertExprReturns(
        "{}.Item(3)",
        "");
    // past end of set
    assertExprReturns(
        "{[Gender].members}.Item(4)",
        "");
    // negative index
    assertExprReturns(
        "{[Gender].members}.Item(-50)",
        "");
}
/**
 * Tests a tuple whose second component's hierarchy is only known at run
 * time (via Dimensions(0).defaultMember).
 */
public void testTupleAppliedToUnknownHierarchy() {
    // manifestation of bug 1735821
    assertQueryReturns(
        "with \n"
        + "member [Product].[Test] as '([Product].[Food],Dimensions(0).defaultMember)' \n"
        + "select \n"
        + "{[Product].[Test], [Product].[Food]} on columns, \n"
        + "{[Measures].[Store Sales]} on rows \n"
        + "from Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Product].[Test]}\n"
        + "{[Product].[Food]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Store Sales]}\n"
        + "Row #0: 191,940.00\n"
        + "Row #0: 409,035.59\n");
}
/** Tests which hierarchies a tuple expression depends on. */
public void testTupleDepends()
{
    // A fully specified tuple depends on nothing.
    getTestContext().assertMemberExprDependsOn(
        "([Store].[USA], [Gender].[F])", "{}");
    // A bare dimension argument leaves a dependency on that dimension.
    getTestContext().assertMemberExprDependsOn(
        "([Store].[USA], [Gender])", "{[Gender]}");
    // in a scalar context, the expression depends on everything except
    // the explicitly stated dimensions
    getTestContext().assertExprDependsOn(
        "([Store].[USA], [Gender])",
        TestContext.allHiersExcept("[Store]"));
    // The result should be all dims except [Gender], but there's a small
    // bug in MemberValueCalc.dependsOn where we escalate 'might depend' to
    // 'depends' and we return that it depends on all dimensions.
    getTestContext().assertExprDependsOn(
        "(Dimensions('Store').CurrentMember, [Gender].[F])",
        TestContext.allHiers());
}
/** Tests Item applied to an empty set, i.e. properties of the null member. */
public void testItemNull()
{
    // In the following queries, MSAS returns 'Formula error - object type
    // is not valid - in an <object> base class. An error occurred during
    // attempt to get cell value'. This is because in MSAS, Item is a COM
    // function, and COM doesn't like null pointers.
    //
    // Mondrian represents null members as actual objects, so its behavior
    // is different.
    // MSAS returns error here.
    assertExprReturns(
        "Filter([Gender].members, 1 = 0).Item(0).Dimension.Name",
        "Gender");
    // MSAS returns error here.
    assertExprReturns(
        "Filter([Gender].members, 1 = 0).Item(0).Parent",
        "");
    // Non-empty filter: a real member, so the tuple evaluates normally.
    assertExprReturns(
        "(Filter([Store].members, 0 = 0).Item(0).Item(0),"
        + "Filter([Store].members, 0 = 0).Item(0).Item(0))",
        "266,773");
    if (isDefaultNullMemberRepresentation()) {
        // MSAS returns error here.
        assertExprReturns(
            "Filter([Gender].members, 1 = 0).Item(0).Name",
            "#null");
    }
}
/** Tests tuples containing null members: they evaluate to null and are dropped from sets. */
public void testTupleNull() {
    // if a tuple contains any null members, it evaluates to null
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns,\n"
        + " { ([Gender].[M], [Store]),\n"
        + " ([Gender].[F], [Store].parent),\n"
        + " ([Gender].parent, [Store])} on rows\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Gender].[M], [Store].[All Stores]}\n"
        + "Row #0: 135,215\n");
    // the set function eliminates tuples which are wholly or partially
    // null
    assertAxisReturns(
        "([Gender].parent, [Marital Status]),\n" // part null
        + " ([Gender].[M], [Marital Status].parent),\n" // part null
        + " ([Gender].parent, [Marital Status].parent),\n" // wholly null
        + " ([Gender].[M], [Marital Status])", // not null
        "{[Gender].[M], [Marital Status].[All Marital Status]}");
    if (isDefaultNullMemberRepresentation()) {
        // The tuple constructor returns a null tuple if one of its
        // arguments is null -- and the Item function returns null if the
        // tuple is null.
        assertExprReturns(
            "([Gender].parent, [Marital Status]).Item(0).Name",
            "#null");
        assertExprReturns(
            "([Gender].parent, [Marital Status]).Item(1).Name",
            "#null");
    }
}
/**
 * Asserts that the data cells of a query result match an expected matrix
 * of values, within a numeric tolerance.
 *
 * @param expected  expected cell values indexed [row][column]; a
 *                  {@code null} entry means the corresponding cell must
 *                  be null
 * @param result    query result whose cells are checked; axis 0 is
 *                  columns, axis 1 is rows
 * @param tolerance maximum allowed absolute difference for numeric cells
 */
private void checkDataResults(
    Double[][] expected,
    Result result,
    final double tolerance)
{
    int[] coords = new int[2];
    for (int row = 0; row < expected.length; row++) {
        coords[1] = row;
        // Iterate over this row's own length rather than row 0's, so a
        // ragged expectation array is checked (and indexed) correctly.
        for (int col = 0; col < expected[row].length; col++) {
            coords[0] = col;
            Cell cell = result.getCell(coords);
            final Double expectedValue = expected[row][col];
            if (expectedValue == null) {
                assertTrue("Expected null value", cell.isNull());
            } else if (cell.isNull()) {
                fail(
                    "Cell at (" + row + ", " + col
                    + ") was null, but was expecting "
                    + expectedValue);
            } else {
                assertEquals(
                    "Incorrect value returned at ("
                    + row + ", " + col + ")",
                    expectedValue,
                    ((Number) cell.getValue()).doubleValue(),
                    tolerance);
            }
        }
    }
}
/** Tests <code><Level>.<Member></code> name resolution. */
public void testLevelMemberExpressions() {
    // Should return Beverly Hills in California.
    assertAxisReturns(
        "[Store].[Store City].[Beverly Hills]",
        "[Store].[USA].[CA].[Beverly Hills]");
    // There are two months named "1" in the time dimension: one
    // for 1997 and one for 1998. <Level>.<Member> should return
    // the first one.
    assertAxisReturns("[Time].[Month].[1]", "[Time].[1997].[Q1].[1]");
    // Shouldn't be able to find a member named "Q1" on the month level.
    assertAxisThrows(
        "[Time].[Month].[Q1]",
        "MDX object '[Time].[Month].[Q1]' not found in cube");
}
/** Tests searched CASE WHEN where the second condition matches. */
public void testCaseTestMatch() {
    assertExprReturns(
        "CASE WHEN 1=0 THEN \"first\" WHEN 1=1 THEN \"second\" WHEN 1=2 THEN \"third\" ELSE \"fourth\" END",
        "second");
}

/** Tests searched CASE WHEN falling through to ELSE. */
public void testCaseTestMatchElse() {
    assertExprReturns(
        "CASE WHEN 1=0 THEN \"first\" ELSE \"fourth\" END",
        "fourth");
}

/** Tests searched CASE WHEN with no match and no ELSE: yields null (empty string). */
public void testCaseTestMatchNoElse() {
    assertExprReturns(
        "CASE WHEN 1=0 THEN \"first\" END",
        "");
}
/**
 * Testcase for bug 1799391, "Case Test function throws class cast
 * exception"
 */
public void testCaseTestReturnsMemberBug1799391() {
    assertQueryReturns(
        "WITH\n"
        + " MEMBER [Product].[CaseTest] AS\n"
        + " 'CASE\n"
        + " WHEN [Gender].CurrentMember IS [Gender].[M] THEN [Gender].[F]\n"
        + " ELSE [Gender].[F]\n"
        + " END'\n"
        + " \n"
        + "SELECT {[Product].[CaseTest]} ON 0, {[Gender].[M]} ON 1 FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Product].[CaseTest]}\n"
        + "Axis #2:\n"
        + "{[Gender].[M]}\n"
        + "Row #0: 131,558\n");
    // Searched CASE returning a member.
    assertAxisReturns(
        "CASE WHEN 1+1 = 2 THEN [Gender].[F] ELSE [Gender].[F].Parent END",
        "[Gender].[F]");
    // try case match for good measure
    assertAxisReturns(
        "CASE 1 WHEN 2 THEN [Gender].[F] ELSE [Gender].[F].Parent END",
        "[Gender].[All Gender]");
}
/** Tests simple (matched) CASE where the second value matches. */
public void testCaseMatch() {
    assertExprReturns(
        "CASE 2 WHEN 1 THEN \"first\" WHEN 2 THEN \"second\" WHEN 3 THEN \"third\" ELSE \"fourth\" END",
        "second");
}

/** Tests simple CASE falling through to ELSE. */
public void testCaseMatchElse() {
    assertExprReturns(
        "CASE 7 WHEN 1 THEN \"first\" ELSE \"fourth\" END",
        "fourth");
}

/** Tests simple CASE with no match and no ELSE: yields null (empty string). */
public void testCaseMatchNoElse() {
    assertExprReturns(
        "CASE 8 WHEN 0 THEN \"first\" END",
        "");
}

/** Tests that type mismatches within CASE expressions are rejected. */
public void testCaseTypeMismatch() {
    // type mismatch between case and else
    assertAxisThrows(
        "CASE 1 WHEN 1 THEN 2 ELSE \"foo\" END",
        "No function matches signature");
    // type mismatch between case and case
    assertAxisThrows(
        "CASE 1 WHEN 1 THEN 2 WHEN 2 THEN \"foo\" ELSE 3 END",
        "No function matches signature");
    // type mismatch between value and case
    assertAxisThrows(
        "CASE 1 WHEN \"foo\" THEN 2 ELSE 3 END",
        "No function matches signature");
    // non-boolean condition
    assertAxisThrows(
        "CASE WHEN 1 = 2 THEN 3 WHEN 4 THEN 5 ELSE 6 END",
        "No function matches signature");
}
/**
 * Testcase for
 * <a href="http://jira.pentaho.com/browse/MONDRIAN-853">
 * bug MONDRIAN-853, "When using CASE WHEN in a CalculatedMember values are
 * not returned the way expected"</a>.
 */
public void testCaseTuple() {
    // The case in the bug, simplified. With the bug, returns a member array
    // "[Lmondrian.olap.Member;@151b0a5". Type deduction should realize
    // that the result is a scalar, therefore a tuple (represented by a
    // member array) needs to be evaluated to a scalar. I think that if we
    // get the type deduction right, the MDX exp compiler will handle the
    // rest.
    // NOTE: disabled until the type-deduction fix lands; see comment above.
    if (false)
    assertExprReturns(
        "case 1 when 0 then 1.5\n"
        + " else ([Gender].[M], [Measures].[Unit Sales]) end",
        "135,215");
    // "case when" variant always worked
    assertExprReturns(
        "case when 1=0 then 1.5\n"
        + " else ([Gender].[M], [Measures].[Unit Sales]) end",
        "135,215");
    // case 2: cannot deduce type (tuple x) vs. (tuple y). Should be able
    // to deduce that the result type is tuple-type<member-type<Gender>,
    // member-type<Measures>>.
    if (false)
    assertExprReturns(
        "case when 1=0 then ([Gender].[M], [Measures].[Store Sales])\n"
        + " else ([Gender].[M], [Measures].[Unit Sales]) end",
        "xxx");
    // case 3: mixture of member & tuple. Should be able to deduce that
    // result type is an expression.
    if (false)
    assertExprReturns(
        "case when 1=0 then ([Measures].[Store Sales])\n"
        + " else ([Gender].[M], [Measures].[Unit Sales]) end",
        "xxx");
}
/** Tests <code><Member>.Properties(<String>)</code> on a leaf member. */
public void testPropertiesExpr() {
    assertExprReturns(
        "[Store].[USA].[CA].[Beverly Hills].[Store 6].Properties(\"Store Type\")",
        "Gourmet Supermarket");
}

/**
 * Test case for bug
 * <a href="http://jira.pentaho.com/browse/MONDRIAN-1227">MONDRIAN-1227,
 * "Properties function does not implicitly convert dimension to member; has
 * documentation typos"</a>.
 */
public void testPropertiesOnDimension() {
    // [Store] is a dimension. When called with a property like FirstChild,
    // it is implicitly converted to a member.
    assertAxisReturns("[Store].FirstChild", "[Store].[Canada]");
    // The same should happen with the <Member>.Properties(<String>)
    // function; now the bug is fixed, it does. Dimension is implicitly
    // converted to member.
    assertExprReturns(
        "[Store].Properties('MEMBER_UNIQUE_NAME')",
        "[Store].[All Stores]");
    // Hierarchy is implicitly converted to member.
    assertExprReturns(
        "[Store].[USA].Hierarchy.Properties('MEMBER_UNIQUE_NAME')",
        "[Store].[All Stores]");
}

/**
 * Tests that non-existent property throws an error. *
 */
public void testPropertiesNonExistent() {
    assertExprThrows(
        "[Store].[USA].[CA].[Beverly Hills].[Store 6].Properties(\"Foo\")",
        "Property 'Foo' is not valid for");
}
/** Tests Properties used inside a Filter predicate. */
public void testPropertiesFilter() {
    Result result = executeQuery(
        "SELECT { [Store Sales] } ON COLUMNS,\n"
        + " TOPCOUNT(Filter( [Store].[Store Name].Members,\n"
        + " [Store].CurrentMember.Properties(\"Store Type\") = \"Supermarket\"),\n"
        + " 10, [Store Sales]) ON ROWS\n"
        + "FROM [Sales]");
    // Only 8 supermarkets exist, so TopCount(10, ...) returns 8 rows.
    Assert.assertEquals(8, result.getAxes()[1].getPositions().size());
}
/** Tests a member property used inside a calculated member's formula. */
public void testPropertyInCalculatedMember() {
    Result result = executeQuery(
        "WITH MEMBER [Measures].[Store Sales per Sqft]\n"
        + "AS '[Measures].[Store Sales] / "
        + " [Store].CurrentMember.Properties(\"Store Sqft\")'\n"
        + "SELECT \n"
        + " {[Measures].[Unit Sales], [Measures].[Store Sales per Sqft]} ON COLUMNS,\n"
        + " {[Store].[Store Name].members} ON ROWS\n"
        + "FROM Sales");
    Member member;
    Cell cell;
    // Spot-check a store with data ...
    member = result.getAxes()[1].getPositions().get(18).get(0);
    Assert.assertEquals(
        "[Store].[USA].[WA].[Bellingham].[Store 2]",
        member.getUniqueName());
    cell = result.getCell(new int[]{0, 18});
    Assert.assertEquals("2,237", cell.getFormattedValue());
    cell = result.getCell(new int[]{1, 18});
    Assert.assertEquals(".17", cell.getFormattedValue());
    // ... and a store without data (both cells empty).
    member = result.getAxes()[1].getPositions().get(3).get(0);
    Assert.assertEquals(
        "[Store].[Mexico].[DF].[San Andres].[Store 21]",
        member.getUniqueName());
    cell = result.getCell(new int[]{0, 3});
    Assert.assertEquals("", cell.getFormattedValue());
    cell = result.getCell(new int[]{1, 3});
    Assert.assertEquals("", cell.getFormattedValue());
}
/** Tests OpeningPeriod with explicit, defaulted and mismatched arguments. */
public void testOpeningPeriod() {
    assertAxisReturns(
        "OpeningPeriod([Time].[Month], [Time].[1997].[Q3])",
        "[Time].[1997].[Q3].[7]");
    assertAxisReturns(
        "OpeningPeriod([Time].[Quarter], [Time].[1997])",
        "[Time].[1997].[Q1]");
    assertAxisReturns(
        "OpeningPeriod([Time].[Year], [Time].[1997])", "[Time].[1997]");
    assertAxisReturns(
        "OpeningPeriod([Time].[Month], [Time].[1997])",
        "[Time].[1997].[Q1].[1]");
    // Works on non-time hierarchies too.
    assertAxisReturns(
        "OpeningPeriod([Product].[Product Name], [Product].[All Products].[Drink])",
        "[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]");
    // Ragged hierarchy: level below the member still resolves ...
    getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
        "OpeningPeriod([Store].[Store City], [Store].[All Stores].[Israel])",
        "[Store].[Israel].[Israel].[Haifa]");
    // ... but a skipped level yields null.
    getTestContext().withCube("[Sales Ragged]").assertAxisReturns(
        "OpeningPeriod([Store].[Store State], [Store].[All Stores].[Israel])",
        "");
    // Default member is [Time].[1997].
    assertAxisReturns(
        "OpeningPeriod([Time].[Month])", "[Time].[1997].[Q1].[1]");
    assertAxisReturns("OpeningPeriod()", "[Time].[1997].[Q1]");
    // Level and member from different hierarchies is an error.
    TestContext testContext = getTestContext().withCube("[Sales Ragged]");
    testContext.assertAxisThrows(
        "OpeningPeriod([Time].[Year], [Store].[All Stores].[Israel])",
        "The <level> and <member> arguments to OpeningPeriod must be "
        + "from the same hierarchy. The level was from '[Time]' but "
        + "the member was from '[Store]'.");
    assertAxisThrows(
        "OpeningPeriod([Store].[Store City])",
        "The <level> and <member> arguments to OpeningPeriod must be "
        + "from the same hierarchy. The level was from '[Store]' but "
        + "the member was from '[Time]'.");
}
/**
 * This tests new NULL functionality exception throwing
 *
 */
public void testOpeningPeriodNull() {
    // A NULL member argument is rejected at parse time.
    assertAxisThrows(
        "OpeningPeriod([Time].[Month], NULL)",
        "Mondrian Error:Failed to parse query 'select {OpeningPeriod([Time].[Month], NULL)} on columns from Sales'");
}
/**
 * Tests LastPeriods: positive counts look backwards (inclusive), negative
 * counts look forwards, zero yields the empty set, and counts larger than
 * the hierarchy are clipped.
 */
public void testLastPeriods() {
    // Zero count: empty set.
    assertAxisReturns(
        "LastPeriods(0, [Time].[1998])", "");
    // Count of 1 or -1: just the member itself.
    assertAxisReturns(
        "LastPeriods(1, [Time].[1998])", "[Time].[1998]");
    assertAxisReturns(
        "LastPeriods(-1, [Time].[1998])", "[Time].[1998]");
    assertAxisReturns(
        "LastPeriods(2, [Time].[1998])",
        "[Time].[1997]\n" + "[Time].[1998]");
    assertAxisReturns(
        "LastPeriods(-2, [Time].[1997])",
        "[Time].[1997]\n" + "[Time].[1998]");
    // Counts beyond the edge of the hierarchy are clipped.
    assertAxisReturns(
        "LastPeriods(5000, [Time].[1998])",
        "[Time].[1997]\n" + "[Time].[1998]");
    assertAxisReturns(
        "LastPeriods(-5000, [Time].[1997])",
        "[Time].[1997]\n" + "[Time].[1998]");
    // Quarter level, including spans crossing year boundaries.
    assertAxisReturns(
        "LastPeriods(2, [Time].[1998].[Q2])",
        "[Time].[1998].[Q1]\n" + "[Time].[1998].[Q2]");
    assertAxisReturns(
        "LastPeriods(4, [Time].[1998].[Q2])",
        "[Time].[1997].[Q3]\n"
        + "[Time].[1997].[Q4]\n"
        + "[Time].[1998].[Q1]\n"
        + "[Time].[1998].[Q2]");
    assertAxisReturns(
        "LastPeriods(-2, [Time].[1997].[Q2])",
        "[Time].[1997].[Q2]\n" + "[Time].[1997].[Q3]");
    assertAxisReturns(
        "LastPeriods(-4, [Time].[1997].[Q2])",
        "[Time].[1997].[Q2]\n"
        + "[Time].[1997].[Q3]\n"
        + "[Time].[1997].[Q4]\n"
        + "[Time].[1998].[Q1]");
    assertAxisReturns(
        "LastPeriods(5000, [Time].[1998].[Q2])",
        "[Time].[1997].[Q1]\n"
        + "[Time].[1997].[Q2]\n"
        + "[Time].[1997].[Q3]\n"
        + "[Time].[1997].[Q4]\n"
        + "[Time].[1998].[Q1]\n"
        + "[Time].[1998].[Q2]");
    assertAxisReturns(
        "LastPeriods(-5000, [Time].[1998].[Q2])",
        "[Time].[1998].[Q2]\n"
        + "[Time].[1998].[Q3]\n"
        + "[Time].[1998].[Q4]");
    // Month level.
    assertAxisReturns(
        "LastPeriods(2, [Time].[1998].[Q2].[5])",
        "[Time].[1998].[Q2].[4]\n" + "[Time].[1998].[Q2].[5]");
    assertAxisReturns(
        "LastPeriods(12, [Time].[1998].[Q2].[5])",
        "[Time].[1997].[Q2].[6]\n"
        + "[Time].[1997].[Q3].[7]\n"
        + "[Time].[1997].[Q3].[8]\n"
        + "[Time].[1997].[Q3].[9]\n"
        + "[Time].[1997].[Q4].[10]\n"
        + "[Time].[1997].[Q4].[11]\n"
        + "[Time].[1997].[Q4].[12]\n"
        + "[Time].[1998].[Q1].[1]\n"
        + "[Time].[1998].[Q1].[2]\n"
        + "[Time].[1998].[Q1].[3]\n"
        + "[Time].[1998].[Q2].[4]\n"
        + "[Time].[1998].[Q2].[5]");
    assertAxisReturns(
        "LastPeriods(-2, [Time].[1998].[Q2].[4])",
        "[Time].[1998].[Q2].[4]\n" + "[Time].[1998].[Q2].[5]");
    assertAxisReturns(
        "LastPeriods(-12, [Time].[1997].[Q2].[6])",
        "[Time].[1997].[Q2].[6]\n"
        + "[Time].[1997].[Q3].[7]\n"
        + "[Time].[1997].[Q3].[8]\n"
        + "[Time].[1997].[Q3].[9]\n"
        + "[Time].[1997].[Q4].[10]\n"
        + "[Time].[1997].[Q4].[11]\n"
        + "[Time].[1997].[Q4].[12]\n"
        + "[Time].[1998].[Q1].[1]\n"
        + "[Time].[1998].[Q1].[2]\n"
        + "[Time].[1998].[Q1].[3]\n"
        + "[Time].[1998].[Q2].[4]\n"
        + "[Time].[1998].[Q2].[5]");
    // LastPeriods also works on non-time hierarchies.
    assertAxisReturns(
        "LastPeriods(2, [Gender].[M])",
        "[Gender].[F]\n" + "[Gender].[M]");
    assertAxisReturns(
        "LastPeriods(-2, [Gender].[F])",
        "[Gender].[F]\n" + "[Gender].[M]");
    // Dimension argument coerces to its default (all) member.
    assertAxisReturns(
        "LastPeriods(2, [Gender])", "[Gender].[All Gender]");
    // Null member yields the empty set.
    assertAxisReturns(
        "LastPeriods(2, [Gender].Parent)", "");
}
/** Tests ParallelPeriod with 0-3 arguments at various levels. */
public void testParallelPeriod() {
    assertAxisReturns(
        "parallelperiod([Time].[Quarter], 1, [Time].[1998].[Q1])",
        "[Time].[1997].[Q4]");
    // Negative lag looks forwards.
    assertAxisReturns(
        "parallelperiod([Time].[Quarter], -1, [Time].[1997].[Q1])",
        "[Time].[1997].[Q2]");
    assertAxisReturns(
        "parallelperiod([Time].[Year], 1, [Time].[1998].[Q1])",
        "[Time].[1997].[Q1]");
    assertAxisReturns(
        "parallelperiod([Time].[Year], 1, [Time].[1998].[Q1].[1])",
        "[Time].[1997].[Q1].[1]");
    // No args, therefore finds parallel period to [Time].[1997], which
    // would be [Time].[1996], except that that doesn't exist, so null.
    assertAxisReturns("ParallelPeriod()", "");
    // Parallel period to [Time].[1997], which would be [Time].[1996],
    // except that that doesn't exist, so null.
    assertAxisReturns(
        "ParallelPeriod([Time].[Year], 1, [Time].[1997])", "");
    // one parameter, level 2 above member
    if (isDefaultNullMemberRepresentation()) {
        assertQueryReturns(
            "WITH MEMBER [Measures].[Foo] AS \n"
            + " ' ParallelPeriod([Time].[Year]).UniqueName '\n"
            + "SELECT {[Measures].[Foo]} ON COLUMNS\n"
            + "FROM [Sales]\n"
            + "WHERE [Time].[1997].[Q3].[8]",
            "Axis #0:\n"
            + "{[Time].[1997].[Q3].[8]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Foo]}\n"
            + "Row #0: [Time].[#null]\n");
    }
    // one parameter, level 1 above member
    assertQueryReturns(
        "WITH MEMBER [Measures].[Foo] AS \n"
        + " ' ParallelPeriod([Time].[Quarter]).UniqueName '\n"
        + "SELECT {[Measures].[Foo]} ON COLUMNS\n"
        + "FROM [Sales]\n"
        + "WHERE [Time].[1997].[Q3].[8]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: [Time].[1997].[Q2].[5]\n");
    // one parameter, level same as member
    assertQueryReturns(
        "WITH MEMBER [Measures].[Foo] AS \n"
        + " ' ParallelPeriod([Time].[Month]).UniqueName '\n"
        + "SELECT {[Measures].[Foo]} ON COLUMNS\n"
        + "FROM [Sales]\n"
        + "WHERE [Time].[1997].[Q3].[8]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: [Time].[1997].[Q3].[7]\n");
    // one parameter, level below member
    if (isDefaultNullMemberRepresentation()) {
        assertQueryReturns(
            "WITH MEMBER [Measures].[Foo] AS \n"
            + " ' ParallelPeriod([Time].[Month]).UniqueName '\n"
            + "SELECT {[Measures].[Foo]} ON COLUMNS\n"
            + "FROM [Sales]\n"
            + "WHERE [Time].[1997].[Q3]",
            "Axis #0:\n"
            + "{[Time].[1997].[Q3]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Foo]}\n"
            + "Row #0: [Time].[#null]\n");
    }
}
/**
 * Disabled (leading underscore keeps JUnit from running it): the expected
 * error message below is a placeholder, not the real message.
 */
public void _testParallelPeriodThrowsException() {
    assertQueryThrows(
        "select {parallelperiod([Time].[Year], 1)} on columns "
        + "from [Sales] where ([Time].[1998].[Q1].[2])",
        "This should say something about Time appearing on two different axes (slicer an columns)");
}
/** Tests which hierarchies a ParallelPeriod expression depends on. */
public void testParallelPeriodDepends() {
    // Without an explicit member argument, depends on [Time].
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod([Time].[Quarter], 2.0)", "{[Time]}");
    // Fully specified: no dependencies.
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod([Time].[Quarter], 2.0, [Time].[1997].[Q3])", "{}");
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod()",
        "{[Time]}");
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod([Product].[Food])", "{[Product]}");
    // [Gender].[M] is used here as a numeric expression!
    // The numeric expression DOES depend upon [Product].
    // The expression as a whole depends upon everything except [Gender].
    // (A single local replaces the previous duplicated s1/s11 locals,
    // which computed the identical value twice.)
    String allButGender = TestContext.allHiersExcept("[Gender]");
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod([Product].[Product Family], [Gender].[M], [Product].[Food])",
        allButGender);
    // As above, with the explicit member argument omitted.
    getTestContext().assertMemberExprDependsOn(
        "ParallelPeriod([Product].[Product Family], [Gender].[M])",
        allButGender);
    getTestContext().assertSetExprDependsOn(
        "parallelperiod([Time].[Time].CurrentMember)",
        "{[Time]}");
}
/** Tests ParallelPeriod with an explicit level and lag inside a calculated member. */
public void testParallelPeriodLevelLag() {
    assertQueryReturns(
        "with member [Measures].[Prev Unit Sales] as "
        + " '([Measures].[Unit Sales], parallelperiod([Time].[Quarter], 2))' "
        + "select "
        + " crossjoin({[Measures].[Unit Sales], [Measures].[Prev Unit Sales]}, {[Marital Status].[All Marital Status].children}) on columns, "
        + " {[Time].[1997].[Q3]} on rows "
        + "from "
        + " [Sales] ",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales], [Marital Status].[M]}\n"
        + "{[Measures].[Unit Sales], [Marital Status].[S]}\n"
        + "{[Measures].[Prev Unit Sales], [Marital Status].[M]}\n"
        + "{[Measures].[Prev Unit Sales], [Marital Status].[S]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q3]}\n"
        + "Row #0: 32,815\n"
        + "Row #0: 33,033\n"
        + "Row #0: 33,101\n"
        + "Row #0: 33,190\n");
}
/** ParallelPeriod with a level argument only (default lag of 1). */
public void testParallelPeriodLevel() {
assertQueryReturns(
"with "
+ " member [Measures].[Prev Unit Sales] as "
+ " '([Measures].[Unit Sales], parallelperiod([Time].[Quarter]))' "
+ "select "
+ " crossjoin({[Measures].[Unit Sales], [Measures].[Prev Unit Sales]}, {[Marital Status].[All Marital Status].[M]}) on columns, "
+ " {[Time].[1997].[Q3].[8]} on rows "
+ "from "
+ " [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales], [Marital Status].[M]}\n"
+ "{[Measures].[Prev Unit Sales], [Marital Status].[M]}\n"
+ "Axis #2:\n"
+ "{[Time].[1997].[Q3].[8]}\n"
+ "Row #0: 10,957\n"
+ "Row #0: 10,280\n");
}
/** The '+' operator, including its null-operand semantics. */
public void testPlus() {
getTestContext().assertExprDependsOn("1 + 2", "{}");
String s1 = TestContext.allHiersExcept("[Measures]", "[Gender]");
getTestContext().assertExprDependsOn(
"([Measures].[Unit Sales], [Gender].[F]) + 2", s1);
assertExprReturns("1+2", "3");
assertExprReturns("5 + " + NullNumericExpr, "5"); // 5 + null --> 5
// null + null --> null (formatted as empty string)
assertExprReturns(NullNumericExpr + " + " + NullNumericExpr, "");
assertExprReturns(NullNumericExpr + " + 0", "0");
}
/** The '-' operator, including its null-operand semantics. */
public void testMinus() {
assertExprReturns("1-3", "-2");
assertExprReturns("5 - " + NullNumericExpr, "5"); // 5 - null --> 5
assertExprReturns(NullNumericExpr + " - - 2", "2"); // null - (-2) --> 2
assertExprReturns(NullNumericExpr + " - " + NullNumericExpr, ""); // null - null --> null
}
/**
 * Regression test for bug 1234759: subtraction involving a null cell
 * used to yield null instead of treating null as zero.
 */
public void testMinus_bug1234759()
{
assertQueryReturns(
"WITH MEMBER [Customers].[USAMinusMexico]\n"
+ "AS '([Customers].[All Customers].[USA] - [Customers].[All Customers].[Mexico])'\n"
+ "SELECT {[Measures].[Unit Sales]} ON COLUMNS,\n"
+ "{[Customers].[All Customers].[USA], [Customers].[All Customers].[Mexico],\n"
+ "[Customers].[USAMinusMexico]} ON ROWS\n"
+ "FROM [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[Mexico]}\n"
+ "{[Customers].[USAMinusMexico]}\n"
+ "Row #0: 266,773\n"
+ "Row #1: \n"
+ "Row #2: 266,773\n"
// with bug 1234759, this was null
+ "");
}
/** '-' must be left-associative: 11-7-5 == (11-7)-5. */
public void testMinusAssociativity() {
// right-associative would give 11-(7-5) = 9, which is wrong
assertExprReturns("11-7-5", "-1");
}
/** The '*' operator, including its null-operand semantics. */
public void testMultiply() {
assertExprReturns("4*7", "28");
assertExprReturns("5 * " + NullNumericExpr, ""); // 5 * null --> null
assertExprReturns(NullNumericExpr + " * - 2", ""); // null * -2 --> null
// Was a copy-paste of the testMinus case (" - "); this test should
// exercise multiplication: null * null --> null.
assertExprReturns(NullNumericExpr + " * " + NullNumericExpr, "");
}
/** '*' and '/' bind tighter than '+'/'-' and associate left-to-right. */
public void testMultiplyPrecedence() {
assertExprReturns("3 + 4 * 5 + 6", "29");
assertExprReturns("5 * 24 / 4 * 2", "60");
assertExprReturns("48 / 4 / 2", "6");
}
/**
 * Bug 774807 caused expressions to be mistaken for the crossjoin
 * operator. All three variants (parenthesized, bare, and with '+ 0')
 * must parse as numeric multiplication and yield the same result.
 */
public void testMultiplyBug774807() {
final String desiredResult =
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[All Stores]}\n"
+ "Axis #2:\n"
+ "{[Measures].[Store Sales]}\n"
+ "{[Measures].[A]}\n"
+ "Row #0: 565,238.13\n"
+ "Row #1: 319,494,143,605.90\n";
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '([Measures].[Store Sales] * [Measures].[Store Sales])'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
// as above, no parentheses
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '[Measures].[Store Sales] * [Measures].[Store Sales]'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
// as above, plus 0
assertQueryReturns(
"WITH MEMBER [Measures].[A] AS\n"
+ " '[Measures].[Store Sales] * [Measures].[Store Sales] + 0'\n"
+ "SELECT {[Store]} ON COLUMNS,\n"
+ " {[Measures].[Store Sales], [Measures].[A]} ON ROWS\n"
+ "FROM Sales", desiredResult);
}
/**
 * The '/' operator, under both settings of the
 * NullDenominatorProducesNull property. Restores the property in a
 * finally block so later tests are unaffected.
 */
public void testDivide() {
assertExprReturns("10 / 5", "2");
assertExprReturns(NullNumericExpr + " / - 2", ""); // null / -2 --> null
assertExprReturns(NullNumericExpr + " / " + NullNumericExpr, "");
boolean origNullDenominatorProducesNull =
MondrianProperties.instance().NullDenominatorProducesNull.get();
try {
// default behavior
MondrianProperties.instance().NullDenominatorProducesNull.set(
false);
assertExprReturns("-2 / " + NullNumericExpr, "Infinity");
assertExprReturns("0 / 0", "NaN");
assertExprReturns("-3 / (2 - 2)", "-Infinity");
assertExprReturns("NULL/1", "");
assertExprReturns("NULL/NULL", "");
assertExprReturns("1/NULL", "Infinity");
// when NullOrZeroDenominatorProducesNull is set to true
MondrianProperties.instance().NullDenominatorProducesNull.set(true);
// null denominator now produces null, but a computed zero
// denominator still produces +/-Infinity or NaN
assertExprReturns("-2 / " + NullNumericExpr, "");
assertExprReturns("0 / 0", "NaN");
assertExprReturns("-3 / (2 - 2)", "-Infinity");
assertExprReturns("NULL/1", "");
assertExprReturns("NULL/NULL", "");
assertExprReturns("1/NULL", "");
} finally {
MondrianProperties.instance().NullDenominatorProducesNull.set(
origNullDenominatorProducesNull);
}
}
/** '/' binds tighter than '-' and associates left-to-right. */
public void testDividePrecedence() {
assertExprReturns("24 / 4 / 2 * 10 - -1", "31");
}
/** The Mod function; results match Excel XP, including sign rules. */
public void testMod() {
// the following tests are consistent with excel xp
assertExprReturns("mod(11, 3)", "2");
assertExprReturns("mod(-12, 3)", "0");
// can handle non-ints, using the formula MOD(n, d) = n - d * INT(n / d)
assertExprReturns("mod(7.2, 3)", 1.2, 0.0001);
assertExprReturns("mod(7.2, 3.2)", .8, 0.0001);
assertExprReturns("mod(7.2, -3.2)", -2.4, 0.0001);
// per Excel doc "sign of result is same as divisor"
assertExprReturns("mod(3, 2)", "1");
assertExprReturns("mod(-3, 2)", "1");
assertExprReturns("mod(3, -2)", "-1");
assertExprReturns("mod(-3, -2)", "-1");
// zero divisor raises an ArithmeticException
assertExprThrows(
"mod(4, 0)",
"java.lang.ArithmeticException: / by zero");
assertExprThrows(
"mod(0, 0)",
"java.lang.ArithmeticException: / by zero");
}
/** Unary minus on a numeric literal. */
public void testUnaryMinus() {
assertExprReturns("-3", "-3");
}
/** Unary minus applied to a tuple-valued cell. */
public void testUnaryMinusMember() {
assertExprReturns(
"- ([Measures].[Unit Sales],[Gender].[F])",
"-131,558");
}
/** Unary minus binds tighter than binary '*' and '-'. */
public void testUnaryMinusPrecedence() {
assertExprReturns("1 - -10.5 * 2 -3", "19");
}
/** Negative zero literal formats as plain "0". */
public void testNegativeZero() {
assertExprReturns("-0.0", "0");
}
/** Negative zero via parenthesized expression formats as "0". */
public void testNegativeZero1() {
assertExprReturns("-(0.0)", "0");
}
/** Subtraction yielding negative zero formats as "0". */
public void testNegativeZeroSubtract() {
assertExprReturns("-0.0 - 0.0", "0");
}
/** Multiplication yielding negative zero formats as "0". */
public void testNegativeZeroMultiply() {
assertExprReturns("-1 * 0", "0");
}
/** Division yielding negative zero formats as "0". */
public void testNegativeZeroDivide() {
assertExprReturns("-0.0 / 2", "0");
}
/**
 * The String(count, char) VBA function, including implicit
 * string-to-char conversion and negative-count error behavior.
 */
public void testString() {
// The String(Integer,Char) function requires us to implicitly cast a
// string to a char.
assertQueryReturns(
"with member measures.x as 'String(3, \"yahoo\")'\n"
+ "select measures.x on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[x]}\n"
+ "Row #0: yyy\n");
// String is converted to char by taking first character
assertExprReturns("String(3, \"yahoo\")", "yyy"); // SSAS agrees
// Integer is converted to char by converting to string and taking first
// character
if (Bug.Ssas2005Compatible) {
// SSAS2005 can implicitly convert an integer (32) to a string, and
// then to a char by taking the first character. Mondrian requires
// an explicit cast.
assertExprReturns("String(3, 32)", "333");
assertExprReturns("String(8, -5)", "--------");
} else {
assertExprReturns("String(3, Cast(32 as string))", "333");
assertExprReturns("String(8, Cast(-5 as string))", "--------");
}
// Error if length<0
assertExprReturns("String(0, 'x')", ""); // SSAS agrees
assertExprThrows(
"String(-1, 'x')", "NegativeArraySizeException"); // SSAS agrees
assertExprThrows(
"String(-200, 'x')", "NegativeArraySizeException"); // SSAS agrees
}
/** The '||' string concatenation operator. */
public void testStringConcat() {
assertExprReturns(
" \"foo\" || \"bar\" ",
"foobar");
}
/** Chained '||' mixing literals and a member-name expression. */
public void testStringConcat2() {
assertExprReturns(
" \"foo\" || [Gender].[M].Name || \"\" ",
"fooM");
}
/** Logical AND: true AND true. */
public void testAnd() {
assertBooleanExprReturns(" 1=1 AND 2=2 ", true);
}
/** Logical AND: true AND false. */
public void testAnd2() {
assertBooleanExprReturns(" 1=1 AND 2=0 ", false);
}
/** Logical OR: false OR false. */
public void testOr() {
assertBooleanExprReturns(" 1=0 OR 2=0 ", false);
}
/** Logical OR: false OR true. */
public void testOr2() {
assertBooleanExprReturns(" 1=0 OR 0=0 ", true);
}
/** Precedence check: AND must bind tighter than OR. */
public void testOrAssociativity1() {
// Would give 'false' if OR were stronger than AND (wrong!)
// NOTE(review): both precedences appear to yield true for this input;
// the case may not discriminate — confirm against a stricter example.
assertBooleanExprReturns(" 1=1 AND 1=0 OR 1=1 ", true);
}
/** Precedence check: AND tighter than OR, OR on the left. */
public void testOrAssociativity2() {
// Would give 'false' if OR were stronger than AND (wrong!)
assertBooleanExprReturns(" 1=1 OR 1=0 AND 1=1 ", true);
}
/** Parentheses override the default AND/OR precedence. */
public void testOrAssociativity3() {
assertBooleanExprReturns(" (1=0 OR 1=1) AND 1=1 ", true);
}
/** Logical XOR: true XOR true is false. */
public void testXor() {
assertBooleanExprReturns(" 1=1 XOR 2=2 ", false);
}
/** Precedence check: AND must bind tighter than XOR. */
public void testXorAssociativity() {
// Would give 'false' if XOR were stronger than AND (wrong!)
assertBooleanExprReturns(" 1 = 1 AND 1 = 1 XOR 1 = 0 ", true);
}
/**
 * NonEmptyCrossJoin: dependency analysis, result filtering of empty
 * cells, and degenerate empty-set inputs.
 */
public void testNonEmptyCrossJoin() {
// NonEmptyCrossJoin needs to evaluate measures to find out whether
// cells are empty, so it implicitly depends upon all dimensions.
String s1 = TestContext.allHiersExcept("[Store]");
getTestContext().assertSetExprDependsOn(
"NonEmptyCrossJoin([Store].[USA].Children, [Gender].Children)", s1);
assertAxisReturns(
"NonEmptyCrossJoin("
+ "[Customers].[All Customers].[USA].[CA].Children, "
+ "[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].Children)",
"{[Customers].[USA].[CA].[Bellflower], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Downey], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Glendale], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Glendale], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Grossmont], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Imperial Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[La Jolla], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Lincoln Acres], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Lincoln Acres], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Long Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Los Angeles], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Newport Beach], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Pomona], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[Pomona], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[San Gabriel], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[West Covina], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}\n"
+ "{[Customers].[USA].[CA].[West Covina], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Light Beer]}\n"
+ "{[Customers].[USA].[CA].[Woodland Hills], [Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good].[Good Imported Beer]}");
// empty set
assertAxisReturns(
"NonEmptyCrossJoin({Gender.Parent}, {Store.Parent})", "");
assertAxisReturns(
"NonEmptyCrossJoin({Store.Parent}, Gender.Children)", "");
assertAxisReturns("NonEmptyCrossJoin(Store.Members, {})", "");
// same dimension twice
// todo: should throw
if (false) {
assertAxisThrows(
"NonEmptyCrossJoin({Store.[USA]}, {Store.[USA].[CA]})",
"xxx");
}
}
/** Logical NOT. */
public void testNot() {
assertBooleanExprReturns(" NOT 1=1 ", false);
}
/** Double negation. */
public void testNotNot() {
assertBooleanExprReturns(" NOT NOT 1=1 ", true);
}
/** NOT binds tighter than AND/OR. */
public void testNotAssociativity() {
assertBooleanExprReturns(" 1=1 AND NOT 1=1 OR NOT 1=1 AND 1=1 ", false);
}
/** IS NULL on a member; the all-member's parent is the null member. */
public void testIsNull() {
assertBooleanExprReturns(" Store.[All Stores] IS NULL ", false);
assertBooleanExprReturns(" Store.[All Stores].parent IS NULL ", true);
}
/** IS comparison between two member expressions. */
public void testIsMember() {
assertBooleanExprReturns(
" Store.[USA].parent IS Store.[All Stores]", true);
assertBooleanExprReturns(
" [Store].[USA].[CA].parent IS [Store].[Mexico]", false);
}
/** IS is not defined for string operands; expect a resolver error. */
public void testIsString() {
assertExprThrows(
" [Store].[USA].Name IS \"USA\" ",
"No function matches signature '<String> IS <String>'");
}
/** IS is not defined for numeric operands; expect a resolver error. */
public void testIsNumeric() {
assertExprThrows(
" [Store].[USA].Level.Ordinal IS 25 ",
"No function matches signature '<Numeric Expression> IS <Numeric Expression>'");
}
/**
 * IS on tuples. Note the second case asserts that tuples with the same
 * members in a different order compare as identical.
 */
public void testIsTuple() {
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA], Gender.[M])", true);
// member order within the tuple does not matter
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA])", true);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA]) "
+ "OR [Gender] IS NULL",
true);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Gender.[M], Store.[USA]) "
+ "AND [Gender] IS NULL",
false);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA], Gender.[F])",
false);
// comparing a tuple against a shorter tuple or a bare member is false
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS (Store.[USA])",
false);
assertBooleanExprReturns(
" (Store.[USA], Gender.[M]) IS Store.[USA]",
false);
}
/** IS comparison between level expressions. */
public void testIsLevel() {
assertBooleanExprReturns(
" Store.[USA].level IS Store.[Store Country] ", true);
assertBooleanExprReturns(
" Store.[USA].[CA].level IS Store.[Store Country] ", false);
}
/** IS comparison between hierarchy expressions. */
public void testIsHierarchy() {
assertBooleanExprReturns(
" Store.[USA].hierarchy IS Store.[Mexico].hierarchy ", true);
assertBooleanExprReturns(
" Store.[USA].hierarchy IS Gender.[M].hierarchy ", false);
}
/** IS comparison between dimension expressions. */
public void testIsDimension() {
assertBooleanExprReturns(" Store.[USA].dimension IS Store ", true);
assertBooleanExprReturns(" Gender.[M].dimension IS Store ", false);
}
/** '=' on unequal string literals. */
public void testStringEquals() {
assertBooleanExprReturns(" \"foo\" = \"bar\" ", false);
}
/** '||' binds tighter than '=', so the RHS concatenates first. */
public void testStringEqualsAssociativity() {
assertBooleanExprReturns(" \"foo\" = \"fo\" || \"o\" ", true);
}
/** '=' on two empty strings. */
public void testStringEqualsEmpty() {
assertBooleanExprReturns(" \"\" = \"\" ", true);
}
/** Numeric '=', including mixed int/double and null operands. */
public void testEq() {
assertBooleanExprReturns(" 1.0 = 1 ", true);
assertBooleanExprReturns(
"[Product].CurrentMember.Level.Ordinal = 2.0", false);
checkNullOp("=");
}
/** '<>' on unequal string literals. */
public void testStringNe() {
assertBooleanExprReturns(" \"foo\" <> \"bar\" ", true);
}
/** Numeric '<>', including null operands. */
public void testNe() {
assertBooleanExprReturns(" 2 <> 1.0 + 1.0 ", false);
checkNullOp("<>");
}
/** '<>' with infinite operands. */
public void testNeInfinity() {
// Infinity compares equal to itself here, so '<>' returns false.
// (Contrast with IEEE NaN, which is unequal to itself.)
assertBooleanExprReturns("(1 / 0) <> (1 / 0)", false);
}
/** Numeric '<', including null operands. */
public void testLt() {
assertBooleanExprReturns(" 2 < 1.0 + 1.0 ", false);
checkNullOp("<");
}
/** Numeric '<=', including null operands. */
public void testLe() {
assertBooleanExprReturns(" 2 <= 1.0 + 1.0 ", true);
checkNullOp("<=");
}
/** Numeric '>', including null operands. */
public void testGt() {
assertBooleanExprReturns(" 2 > 1.0 + 1.0 ", false);
checkNullOp(">");
}
/** Numeric '>=', including null operands. */
public void testGe() {
// Was a copy-paste of testGt (" 2 > ..."); this test should exercise
// the '>=' operator itself: 2 >= 2 is true.
assertBooleanExprReturns(" 2 >= 1.0 + 1.0 ", true);
checkNullOp(">=");
}
/**
 * Asserts that the given comparison operator returns false whenever
 * either (or both) operands are null.
 *
 * @param op comparison operator token, e.g. "=", "<>", "<="
 */
private void checkNullOp(final String op) {
assertBooleanExprReturns(" 0 " + op + " " + NullNumericExpr, false);
assertBooleanExprReturns(NullNumericExpr + " " + op + " 0", false);
assertBooleanExprReturns(
NullNumericExpr + " " + op + " " + NullNumericExpr, false);
}
/** Distinct collapses two spellings of the same member to one. */
public void testDistinctTwoMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[Sheri Nowmer].[Donna Arnold]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]");
}
/** Distinct preserves first-occurrence order while removing dups. */
public void testDistinctThreeMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz]");
}
/** Distinct with two members each duplicated once. */
public void testDistinctFourMembers() {
getTestContext().withCube("HR").assertAxisReturns(
"Distinct({[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Donna Arnold],"
+ "[Employees].[All Employees].[Sheri Nowmer].[Darren Stanz]})",
"[Employees].[Sheri Nowmer].[Donna Arnold]\n"
+ "[Employees].[Sheri Nowmer].[Darren Stanz]");
}
/** Distinct also de-duplicates tuples, not just members. */
public void testDistinctTwoTuples() {
getTestContext().assertAxisReturns(
"Distinct({([Time].[1997],[Store].[All Stores].[Mexico]), "
+ "([Time].[1997], [Store].[All Stores].[Mexico])})",
"{[Time].[1997], [Store].[Mexico]}");
}
/** Distinct over a mix of an explicit tuple and a crossjoin result. */
public void testDistinctSomeTuples() {
getTestContext().assertAxisReturns(
"Distinct({([Time].[1997],[Store].[All Stores].[Mexico]), "
+ "crossjoin({[Time].[1997]},{[Store].[All Stores].children})})",
"{[Time].[1997], [Store].[Mexico]}\n"
+ "{[Time].[1997], [Store].[Canada]}\n"
+ "{[Time].[1997], [Store].[USA]}");
}
/**
 * Make sure that slicer is in force when expression is applied
 * on axis, E.g. select filter([Customers].members, [Unit Sales] > 100)
 * from sales where ([Time].[1998])
 */
public void testFilterWithSlicer() {
Result result = executeQuery(
"select {[Measures].[Unit Sales]} on columns,\n"
+ " filter([Customers].[USA].children,\n"
+ " [Measures].[Unit Sales] > 20000) on rows\n"
+ "from Sales\n"
+ "where ([Time].[1997].[Q1])");
Axis rows = result.getAxes()[1];
// if slicer were ignored, there would be 3 rows
Assert.assertEquals(1, rows.getPositions().size());
Cell cell = result.getCell(new int[]{0, 0});
Assert.assertEquals("30,114", cell.getFormattedValue());
}
/** Filter over a crossjoined set, verifying each surviving tuple. */
public void testFilterCompound() {
Result result = executeQuery(
"select {[Measures].[Unit Sales]} on columns,\n"
+ " Filter(\n"
+ " CrossJoin(\n"
+ " [Gender].Children,\n"
+ " [Customers].[USA].Children),\n"
+ " [Measures].[Unit Sales] > 9500) on rows\n"
+ "from Sales\n"
+ "where ([Time].[1997].[Q1])");
List<Position> rows = result.getAxes()[1].getPositions();
// three (gender, state) pairs exceed the threshold in Q1 1997
Assert.assertEquals(3, rows.size());
Assert.assertEquals("F", rows.get(0).get(0).getName());
Assert.assertEquals("WA", rows.get(0).get(1).getName());
Assert.assertEquals("M", rows.get(1).get(0).getName());
Assert.assertEquals("OR", rows.get(1).get(1).getName());
Assert.assertEquals("M", rows.get(2).get(0).getName());
Assert.assertEquals("WA", rows.get(2).get(1).getName());
}
/** Dependency analysis for Generate: CurrentMember in either argument
 * introduces a dependency only when not bound by the iteration. */
public void testGenerateDepends() {
getTestContext().assertSetExprDependsOn(
"Generate([Product].CurrentMember.Children, Crossjoin({[Product].CurrentMember}, Crossjoin([Store].[Store State].Members, [Store Type].Members)), ALL)",
"{[Product]}");
getTestContext().assertSetExprDependsOn(
"Generate([Product].[All Products].Children, Crossjoin({[Product].CurrentMember}, Crossjoin([Store].[Store State].Members, [Store Type].Members)), ALL)",
"{}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Store].CurrentMember.Children})",
"{}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Gender].CurrentMember})",
"{[Gender]}");
getTestContext().assertSetExprDependsOn(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Gender].[M]})",
"{}");
}
/** Basic Generate over two members, second expression per member. */
public void testGenerate() {
assertAxisReturns(
"Generate({[Store].[USA], [Store].[USA].[CA]}, {[Store].CurrentMember.Children})",
"[Store].[USA].[CA]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA].[WA]\n"
+ "[Store].[USA].[CA].[Alameda]\n"
+ "[Store].[USA].[CA].[Beverly Hills]\n"
+ "[Store].[USA].[CA].[Los Angeles]\n"
+ "[Store].[USA].[CA].[San Diego]\n"
+ "[Store].[USA].[CA].[San Francisco]");
}
/** Generate implicitly converts a bare member argument to a set. */
public void testGenerateNonSet() {
// SSAS implicitly converts arg #2 to a set
assertAxisReturns(
"Generate({[Store].[USA], [Store].[USA].[CA]}, [Store].PrevMember, ALL)",
"[Store].[Mexico]\n"
+ "[Store].[Mexico].[Zacatecas]");
// SSAS implicitly converts arg #1 to a set
assertAxisReturns(
"Generate([Store].[USA], [Store].PrevMember, ALL)",
"[Store].[Mexico]");
}
/** Generate with ALL keeps duplicates across iterations. */
public void testGenerateAll() {
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[OR].[Portland]},"
+ " Ascendants([Store].CurrentMember),"
+ " ALL)",
"[Store].[USA].[CA]\n"
+ "[Store].[USA]\n"
+ "[Store].[All Stores]\n"
+ "[Store].[USA].[OR].[Portland]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA]\n" // duplicate retained because of ALL
+ "[Store].[All Stores]");
}
/** Generate without ALL removes duplicates across iterations. */
public void testGenerateUnique() {
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[OR].[Portland]},"
+ " Ascendants([Store].CurrentMember))",
"[Store].[USA].[CA]\n"
+ "[Store].[USA]\n"
+ "[Store].[All Stores]\n"
+ "[Store].[USA].[OR].[Portland]\n"
+ "[Store].[USA].[OR]");
}
/** Duplicate removal also applies when Generate produces tuples. */
public void testGenerateUniqueTuple() {
assertAxisReturns(
"Generate({([Store].[USA].[CA],[Product].[All Products]), "
+ "([Store].[USA].[CA],[Product].[All Products])},"
+ "{([Store].CurrentMember, [Product].CurrentMember)})",
"{[Store].[USA].[CA], [Product].[All Products]}");
}
/** Generate driving a per-member CrossJoin with TopCount. */
public void testGenerateCrossJoin() {
// Note that the different regions have different Top 2.
assertAxisReturns(
"Generate({[Store].[USA].[CA], [Store].[USA].[CA].[San Francisco]},\n"
+ " CrossJoin({[Store].CurrentMember},\n"
+ " TopCount([Product].[Brand Name].members, \n"
+ " 2,\n"
+ " [Measures].[Unit Sales])))",
"{[Store].[USA].[CA], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Hermanos]}\n"
+ "{[Store].[USA].[CA], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Tell Tale]}\n"
+ "{[Store].[USA].[CA].[San Francisco], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[Ebony]}\n"
+ "{[Store].[USA].[CA].[San Francisco], [Product].[Food].[Produce].[Vegetables].[Fresh Vegetables].[High Top]}");
}
/** String-valued Generate, with and without a separator argument. */
public void testGenerateString() {
assertExprReturns(
"Generate({Time.[1997], Time.[1998]},"
+ " Time.[Time].CurrentMember.Name)",
"19971998");
assertExprReturns(
"Generate({Time.[1997], Time.[1998]},"
+ " Time.[Time].CurrentMember.Name, \" and \")",
"1997 and 1998");
}
/** Head with an explicit count. */
public void testHead() {
assertAxisReturns(
"Head([Store].Children, 2)",
"[Store].[Canada]\n"
+ "[Store].[Mexico]");
}
/** Head with a negative count yields the empty set. */
public void testHeadNegative() {
assertAxisReturns(
"Head([Store].Children, 2 - 3)",
"");
}
/** Head without a count defaults to 1. */
public void testHeadDefault() {
assertAxisReturns(
"Head([Store].Children)",
"[Store].[Canada]");
}
/** A count larger than the set returns the whole set. */
public void testHeadOvershoot() {
assertAxisReturns(
"Head([Store].Children, 2 + 2)",
"[Store].[Canada]\n"
+ "[Store].[Mexico]\n"
+ "[Store].[USA]");
}
/** Head of an empty set is empty, with or without a count. */
public void testHeadEmpty() {
assertAxisReturns(
"Head([Gender].[F].Children, 2)",
"");
assertAxisReturns(
"Head([Gender].[F].Children)",
"");
}
/**
 * Test case for bug 2488492, "Union between calc mem and head function
 * throws exception". The third query is a minimal reproduction that
 * also exercises the analogous problem with Tail.
 */
public void testHeadBug() {
assertQueryReturns(
"SELECT\n"
+ " UNION(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " HEAD(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " IIF(\n"
+ " COUNT(\n"
+ " FILTER(\n"
+ " DESCENDANTS(\n"
+ " [Customers].CURRENTMEMBER,\n"
+ " [Customers].[Country]),\n"
+ " [Measures].[Unit Sales] >= 66),\n"
+ " INCLUDEEMPTY)> 0,\n"
+ " 1,\n"
+ " 0)),\n"
+ " ALL)\n"
+ " ON AXIS(0)\n"
+ "FROM\n"
+ " [Sales]\n",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[All Customers]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: 266,773\n");
// As above, but with a calculated member on the left of the UNION.
assertQueryReturns(
"WITH\n"
+ " MEMBER\n"
+ " [Customers].[COG_OQP_INT_t2]AS '1',\n"
+ " SOLVE_ORDER = 65535\n"
+ "SELECT\n"
+ " UNION(\n"
+ " {([Customers].[COG_OQP_INT_t2])},\n"
+ " HEAD(\n"
+ " {([Customers].CURRENTMEMBER)},\n"
+ " IIF(\n"
+ " COUNT(\n"
+ " FILTER(\n"
+ " DESCENDANTS(\n"
+ " [Customers].CURRENTMEMBER,\n"
+ " [Customers].[Country]),\n"
+ " [Measures].[Unit Sales]>= 66),\n"
+ " INCLUDEEMPTY)> 0,\n"
+ " 1,\n"
+ " 0)),\n"
+ " ALL)\n"
+ " ON AXIS(0)\n"
+ "FROM\n"
+ " [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[COG_OQP_INT_t2]}\n"
+ "{[Customers].[All Customers]}\n"
+ "Row #0: 1\n"
+ "Row #0: 266,773\n");
// More minimal test case. Also demonstrates similar problem with Tail.
assertAxisReturns(
"Union(\n"
+ " Union(\n"
+ " Tail([Customers].[USA].[CA].Children, 2),\n"
+ " Head([Customers].[USA].[WA].Children, 2),\n"
+ " ALL),\n"
+ " Tail([Customers].[USA].[OR].Children, 2),"
+ " ALL)",
"[Customers].[USA].[CA].[West Covina]\n"
+ "[Customers].[USA].[CA].[Woodland Hills]\n"
+ "[Customers].[USA].[WA].[Anacortes]\n"
+ "[Customers].[USA].[WA].[Ballard]\n"
+ "[Customers].[USA].[OR].[W. Linn]\n"
+ "[Customers].[USA].[OR].[Woodburn]");
}
/** Hierarchize default (PRE) ordering: parents before children. */
public void testHierarchize() {
assertAxisReturns(
"Hierarchize(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Drink],\n"
+ " [Product].[Non-Consumable],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]})",
"[Product].[All Products]\n"
+ "[Product].[Drink]\n"
+ "[Product].[Drink].[Dairy]\n"
+ "[Product].[Food]\n"
+ "[Product].[Food].[Eggs]\n"
+ "[Product].[Non-Consumable]");
}
/** Hierarchize POST ordering: children before parents. */
public void testHierarchizePost() {
assertAxisReturns(
"Hierarchize(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " POST)",
"[Product].[Drink].[Dairy]\n"
+ "[Product].[Food].[Eggs]\n"
+ "[Product].[Food]\n"
+ "[Product].[All Products]");
}
/** Hierarchize over a parent-child hierarchy (HR Employees). */
public void testHierarchizePC() {
getTestContext().withCube("HR").assertAxisReturns(
"Hierarchize(\n"
+ " { Subset([Employees].Members, 90, 10),\n"
+ " Head([Employees].Members, 5) })",
"[Employees].[All Employees]\n"
+ "[Employees].[Sheri Nowmer]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Beverly Baker].[Shauna Wyro]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Leopoldo Renfro]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Donna Brockett]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Laurie Anderson]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Louis Gomez]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Melvin Glass]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Kristin Cohen]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Susan Kharman]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Gordon Kirschner]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Geneva Kouba]\n"
+ "[Employees].[Sheri Nowmer].[Derrick Whelply].[Pedro Castillo].[Lin Conley].[Paul Tays].[Cheryl Thorton].[Tricia Clark]");
}
/** Hierarchize PRE applied to a crossjoined (tuple) set. */
public void testHierarchizeCrossJoinPre() {
assertAxisReturns(
"Hierarchize(\n"
+ " CrossJoin(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " [Gender].MEMBERS),\n"
+ " PRE)",
"{[Product].[All Products], [Gender].[All Gender]}\n"
+ "{[Product].[All Products], [Gender].[F]}\n"
+ "{[Product].[All Products], [Gender].[M]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[All Gender]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[F]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[M]}\n"
+ "{[Product].[Food], [Gender].[All Gender]}\n"
+ "{[Product].[Food], [Gender].[F]}\n"
+ "{[Product].[Food], [Gender].[M]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[All Gender]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[F]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[M]}");
}
/** Hierarchize POST applied to a crossjoined (tuple) set. */
public void testHierarchizeCrossJoinPost() {
assertAxisReturns(
"Hierarchize(\n"
+ " CrossJoin(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " [Gender].MEMBERS),\n"
+ " POST)",
"{[Product].[Drink].[Dairy], [Gender].[F]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[M]}\n"
+ "{[Product].[Drink].[Dairy], [Gender].[All Gender]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[F]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[M]}\n"
+ "{[Product].[Food].[Eggs], [Gender].[All Gender]}\n"
+ "{[Product].[Food], [Gender].[F]}\n"
+ "{[Product].[Food], [Gender].[M]}\n"
+ "{[Product].[Food], [Gender].[All Gender]}\n"
+ "{[Product].[All Products], [Gender].[F]}\n"
+ "{[Product].[All Products], [Gender].[M]}\n"
+ "{[Product].[All Products], [Gender].[All Gender]}");
}
/**
 * Tests that the Hierarchize function works correctly when applied to
 * a level whose ordering is determined by an 'ordinal' property.
 * TODO: fix this test (bug 1220787)
 *
 * WG: Note that this is disabled right now due to its impact on other
 * tests later on within the test suite, specifically XMLA tests that
 * return a list of cubes. We could run this test after XMLA, or clear
 * out the cache to solve this.
 */
public void testHierarchizeOrdinal() {
TestContext context = getTestContext().withCube("[Sales_Hierarchize]");
final Connection connection = context.getConnection();
// Create a throwaway cube whose month levels are ordered by
// ordinalColumn="the_month", i.e. alphabetically by month name.
connection.getSchema().createCube(
"<Cube name=\"Sales_Hierarchize\">\n"
+ " <Table name=\"sales_fact_1997\"/>\n"
+ " <Dimension name=\"Time_Alphabetical\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
+ " <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
+ " <Table name=\"time_by_day\"/>\n"
+ " <Level name=\"Year\" column=\"the_year\" type=\"Numeric\" uniqueMembers=\"true\"\n"
+ " levelType=\"TimeYears\"/>\n"
+ " <Level name=\"Quarter\" column=\"quarter\" uniqueMembers=\"false\"\n"
+ " levelType=\"TimeQuarters\"/>\n"
+ " <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
+ " ordinalColumn=\"the_month\"\n"
+ " levelType=\"TimeMonths\"/>\n"
+ " </Hierarchy>\n"
+ " </Dimension>\n"
+ "\n"
+ " <Dimension name=\"Month_Alphabetical\" type=\"TimeDimension\" foreignKey=\"time_id\">\n"
+ " <Hierarchy hasAll=\"false\" primaryKey=\"time_id\">\n"
+ " <Table name=\"time_by_day\"/>\n"
+ " <Level name=\"Month\" column=\"month_of_year\" uniqueMembers=\"false\" type=\"Numeric\"\n"
+ " ordinalColumn=\"the_month\"\n"
+ " levelType=\"TimeMonths\"/>\n"
+ " </Hierarchy>\n"
+ " </Dimension>\n"
+ "\n"
+ " <Measure name=\"Unit Sales\" column=\"unit_sales\" aggregator=\"sum\"\n"
+ " formatString=\"Standard\"/>\n"
+ "</Cube>");
// The [Time_Alphabetical] is ordered alphabetically by month
context.assertAxisReturns(
"Hierarchize([Time_Alphabetical].members)",
"[Time_Alphabetical].[1997]\n"
+ "[Time_Alphabetical].[1997].[Q1]\n"
+ "[Time_Alphabetical].[1997].[Q1].[2]\n"
+ "[Time_Alphabetical].[1997].[Q1].[1]\n"
+ "[Time_Alphabetical].[1997].[Q1].[3]\n"
+ "[Time_Alphabetical].[1997].[Q2]\n"
+ "[Time_Alphabetical].[1997].[Q2].[4]\n"
+ "[Time_Alphabetical].[1997].[Q2].[6]\n"
+ "[Time_Alphabetical].[1997].[Q2].[5]\n"
+ "[Time_Alphabetical].[1997].[Q3]\n"
+ "[Time_Alphabetical].[1997].[Q3].[8]\n"
+ "[Time_Alphabetical].[1997].[Q3].[7]\n"
+ "[Time_Alphabetical].[1997].[Q3].[9]\n"
+ "[Time_Alphabetical].[1997].[Q4]\n"
+ "[Time_Alphabetical].[1997].[Q4].[12]\n"
+ "[Time_Alphabetical].[1997].[Q4].[11]\n"
+ "[Time_Alphabetical].[1997].[Q4].[10]\n"
+ "[Time_Alphabetical].[1998]\n"
+ "[Time_Alphabetical].[1998].[Q1]\n"
+ "[Time_Alphabetical].[1998].[Q1].[2]\n"
+ "[Time_Alphabetical].[1998].[Q1].[1]\n"
+ "[Time_Alphabetical].[1998].[Q1].[3]\n"
+ "[Time_Alphabetical].[1998].[Q2]\n"
+ "[Time_Alphabetical].[1998].[Q2].[4]\n"
+ "[Time_Alphabetical].[1998].[Q2].[6]\n"
+ "[Time_Alphabetical].[1998].[Q2].[5]\n"
+ "[Time_Alphabetical].[1998].[Q3]\n"
+ "[Time_Alphabetical].[1998].[Q3].[8]\n"
+ "[Time_Alphabetical].[1998].[Q3].[7]\n"
+ "[Time_Alphabetical].[1998].[Q3].[9]\n"
+ "[Time_Alphabetical].[1998].[Q4]\n"
+ "[Time_Alphabetical].[1998].[Q4].[12]\n"
+ "[Time_Alphabetical].[1998].[Q4].[11]\n"
+ "[Time_Alphabetical].[1998].[Q4].[10]");
// The [Month_Alphabetical] is a single-level hierarchy ordered
// alphabetically by month.
context.assertAxisReturns(
"Hierarchize([Month_Alphabetical].members)",
"[Month_Alphabetical].[4]\n"
+ "[Month_Alphabetical].[8]\n"
+ "[Month_Alphabetical].[12]\n"
+ "[Month_Alphabetical].[2]\n"
+ "[Month_Alphabetical].[1]\n"
+ "[Month_Alphabetical].[7]\n"
+ "[Month_Alphabetical].[6]\n"
+ "[Month_Alphabetical].[3]\n"
+ "[Month_Alphabetical].[5]\n"
+ "[Month_Alphabetical].[11]\n"
+ "[Month_Alphabetical].[10]\n"
+ "[Month_Alphabetical].[9]");
// clear the cache so that future tests don't fail that expect a
// specific set of cubes
TestContext.instance().flushSchemaCache();
}
public void testIntersectAll() {
// Note: duplicates retained from left, not from right; and order is
// preserved.
assertAxisReturns(
"Intersect({[Time].[1997].[Q2], [Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q2]}, "
+ "{[Time].[1998], [Time].[1997], [Time].[1997].[Q2], [Time].[1997]}, "
+ "ALL)",
"[Time].[1997].[Q2]\n"
+ "[Time].[1997]\n"
+ "[Time].[1997].[Q2]");
}
public void testIntersect() {
// Duplicates not preserved. Output in order that first duplicate
// occurred.
assertAxisReturns(
"Intersect(\n"
+ " {[Time].[1997].[Q2], [Time].[1997], [Time].[1997].[Q1], [Time].[1997].[Q2]}, "
+ "{[Time].[1998], [Time].[1997], [Time].[1997].[Q2], [Time].[1997]})",
"[Time].[1997].[Q2]\n"
+ "[Time].[1997]");
}
public void testIntersectTuples() {
assertAxisReturns(
"Intersect(\n"
+ " {([Time].[1997].[Q2], [Gender].[M]),\n"
+ " ([Time].[1997], [Gender].[F]),\n"
+ " ([Time].[1997].[Q1], [Gender].[M]),\n"
+ " ([Time].[1997].[Q2], [Gender].[M])},\n"
+ " {([Time].[1998], [Gender].[F]),\n"
+ " ([Time].[1997], [Gender].[F]),\n"
+ " ([Time].[1997].[Q2], [Gender].[M]),\n"
+ " ([Time].[1997], [Gender])})",
"{[Time].[1997].[Q2], [Gender].[M]}\n"
+ "{[Time].[1997], [Gender].[F]}");
}
public void testIntersectRightEmpty() {
assertAxisReturns(
"Intersect({[Time].[1997]}, {})",
"");
}
public void testIntersectLeftEmpty() {
assertAxisReturns(
"Intersect({}, {[Store].[USA].[CA]})",
"");
}
    /**
     * Tests the dimensional dependency set reported for
     * {@code Order(<Set>, <Value Expression>)}: it is everything the value
     * expression depends upon, minus the hierarchies of the set being
     * sorted (because Order itself establishes those in context).
     */
    public void testOrderDepends() {
        // Order(<Set>, <Value Expression>) depends upon everything
        // <Value Expression> depends upon, except the dimensions of <Set>.

        // Depends upon everything EXCEPT [Product], [Measures],
        // [Marital Status], [Gender].
        String s11 = TestContext.allHiersExcept(
            "[Product]", "[Measures]", "[Marital Status]", "[Gender]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + "  Crossjoin([Gender].MEMBERS, [Product].MEMBERS),"
            + "  ([Measures].[Unit Sales], [Marital Status].[S]),"
            + "  ASC)",
            s11);

        // Depends upon everything EXCEPT [Product], [Measures],
        // [Marital Status]. Does depend upon [Gender] because only the
        // current member of [Gender], not all its members, is in the set.
        String s12 = TestContext.allHiersExcept(
            "[Product]", "[Measures]", "[Marital Status]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + "  Crossjoin({[Gender].CurrentMember}, [Product].MEMBERS),"
            + "  ([Measures].[Unit Sales], [Marital Status].[S]),"
            + "  ASC)",
            s12);

        // Depends upon everything except [Measures].
        String s13 = TestContext.allHiersExcept("[Measures]");
        getTestContext().assertSetExprDependsOn(
            "Order("
            + "  Crossjoin("
            + "    [Gender].CurrentMember.Children, "
            + "    [Marital Status].CurrentMember.Children), "
            + "  [Measures].[Unit Sales], "
            + "  BDESC)",
            s13);

        // Explicit member lists fix [Store] and [Product]; the sort key fixes
        // [Time] and [Measures].
        String s1 = TestContext.allHiersExcept(
            "[Measures]", "[Store]", "[Product]", "[Time]");
        getTestContext().assertSetExprDependsOn(
            " Order(\n"
            + "    CrossJoin(\n"
            + "      {[Product].[All Products].[Food].[Eggs],\n"
            + "       [Product].[All Products].[Food].[Seafood],\n"
            + "       [Product].[All Products].[Drink].[Alcoholic Beverages]},\n"
            + "      {[Store].[USA].[WA].[Seattle],\n"
            + "       [Store].[USA].[CA],\n"
            + "       [Store].[USA].[OR]}),\n"
            + "    ([Time].[1997].[Q1], [Measures].[Unit Sales]),\n"
            + "    ASC)",
            s1);
    }
    /**
     * Tests the shape of the compiled calc tree produced for Order():
     * constant sort keys are hoisted out of the loop into a ContextCalc
     * wrapper, while keys that vary per member are evaluated inside the
     * sort loop.
     *
     * <p>Skipped when retrowoven, because the JDK 1.4 build does not use
     * Iterable and therefore compiles to different plans.
     */
    public void testOrderCalc() {
        if (Util.Retrowoven) {
            // If retrowoven, we don't use Iterable, so plans are different.
            return;
        }
        // [Measures].[Unit Sales] is a constant member, so it is evaluated in
        // a ContextCalc.
        assertAxisCompilesTo(
            "order([Product].children, [Measures].[Unit Sales])",
            "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
            + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "        Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "        ValueCalc(name=ValueCalc, class=class mondrian.calc.impl.ValueCalc, type=SCALAR, resultStyle=VALUE)\n");

        // [Time].[1997] is constant, and is evaluated in a ContextCalc.
        // [Product].Parent is variable, and is evaluated inside the loop.
        assertAxisCompilesTo(
            "order([Product].children,"
            + " ([Time].[1997], [Product].CurrentMember.Parent))",
            "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
            + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Time].[1997]>, resultStyle=VALUE_NOT_NULL, value=[Time].[1997])\n"
            + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "        Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
            + "            Parent(name=Parent, class=class mondrian.olap.fun.BuiltinFunTable$15$1, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "                CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n");

        // No ContextCalc this time. All members are non-variable.
        assertAxisCompilesTo(
            "order([Product].children, [Product].CurrentMember.Parent)",
            "CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
            + "    Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
            + "        CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "    MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
            + "        Parent(name=Parent, class=class mondrian.olap.fun.BuiltinFunTable$15$1, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
            + "            CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n");

        // List expression is dependent on one of the constant calcs. It cannot
        // be pulled up, so [Gender].[M] is not in the ContextCalc.
        // Note that there is no CopyListCalc - because Filter creates its own
        // mutable copy.
        // Under JDK 1.4, needs an extra converter from list to iterator,
        // because JDK 1.4 doesn't support the ITERABLE result style.
        assertAxisCompilesTo(
            "order(filter([Product].children, [Measures].[Unit Sales] > 1000), "
            + "([Gender].[M], [Measures].[Store Sales]))",
            Util.Retrowoven
                ? ""
                  + "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Store Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Store Sales])\n"
                  + "    MemberCalcImpl(name=MemberCalcImpl, class=class mondrian.olap.fun.OrderFunDef$MemberCalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
                  + "        MemberListIterCalc(name=MemberListIterCalc, class=class mondrian.calc.impl.AbstractExpCompiler$MemberListIterCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=ITERABLE)\n"
                  + "            ImmutableMemberListCalc(name=ImmutableMemberListCalc, class=class mondrian.olap.fun.FilterFunDef$ImmutableMemberListCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "                Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
                  + "                    CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
                  + "                >(name=>, class=class mondrian.olap.fun.BuiltinFunTable$63$1, type=BOOLEAN, resultStyle=VALUE)\n"
                  + "                    MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "                        Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
                  + "                    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=NUMERIC, resultStyle=VALUE_NOT_NULL, value=1000.0)\n"
                  + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "            Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n"
                : ""
                  + "ContextCalc(name=ContextCalc, class=class mondrian.olap.fun.OrderFunDef$ContextCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST)\n"
                  + "    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Store Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Store Sales])\n"
                  + "    CalcImpl(name=CalcImpl, class=class mondrian.olap.fun.OrderFunDef$CalcImpl, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=MUTABLE_LIST, direction=ASC)\n"
                  + "        ImmutableIterCalc(name=ImmutableIterCalc, class=class mondrian.olap.fun.FilterFunDef$ImmutableIterCalc, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=ITERABLE)\n"
                  + "            Children(name=Children, class=class mondrian.olap.fun.BuiltinFunTable$22$1, type=SetType<MemberType<hierarchy=[Product]>>, resultStyle=LIST)\n"
                  + "                CurrentMemberFixed(hierarchy=[Product], name=CurrentMemberFixed, class=class mondrian.olap.fun.HierarchyCurrentMemberFunDef$FixedCalcImpl, type=MemberType<hierarchy=[Product]>, resultStyle=VALUE)\n"
                  + "            >(name=>, class=class mondrian.olap.fun.BuiltinFunTable$63$1, type=BOOLEAN, resultStyle=VALUE)\n"
                  + "                MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "                    Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Measures].[Unit Sales]>, resultStyle=VALUE_NOT_NULL, value=[Measures].[Unit Sales])\n"
                  + "                Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=NUMERIC, resultStyle=VALUE_NOT_NULL, value=1000.0)\n"
                  + "        MemberValueCalc(name=MemberValueCalc, class=class mondrian.calc.impl.MemberValueCalc, type=SCALAR, resultStyle=VALUE)\n"
                  + "            Literal(name=Literal, class=class mondrian.calc.impl.ConstantCalc, type=MemberType<member=[Gender].[M]>, resultStyle=VALUE_NOT_NULL, value=[Gender].[M])\n");
    }
    /**
     * Verifies that the Order function works with a calculated member as
     * the sort key. The children of [All Products] are sorted by increasing
     * name length: Food (4), Drink (5), Non-Consumable (14).
     * See this forum post for additional information:
     * http://forums.pentaho.com/showthread.php?p=179473#post179473
     */
    public void testOrderWithMember() {
        assertQueryReturns(
            "with member [Measures].[Product Name Length] as "
            + "'LEN([Product].CurrentMember.Name)'\n"
            + "select {[Measures].[Product Name Length]} ON COLUMNS,\n"
            + "Order([Product].[All Products].Children, "
            + "[Measures].[Product Name Length], BASC) ON ROWS\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Product Name Length]}\n"
            + "Axis #2:\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Non-Consumable]}\n"
            + "Row #0: 4\n"
            + "Row #1: 5\n"
            + "Row #2: 14\n");
    }
    /**
     * Test case for bug 1797159, "Potential MDX Order Non Empty Problem":
     * NON EMPTY applied to an Order expression must filter out empty rows
     * without disturbing the requested sort order.
     */
    public void testOrderNonEmpty() {
        assertQueryReturns(
            "select NON EMPTY [Gender].Members ON COLUMNS,\n"
            + "NON EMPTY Order([Product].[All Products].[Drink].Children,\n"
            + "[Gender].[All Gender].[F], ASC) ON ROWS\n"
            + "from [Sales]\n"
            + "where ([Customers].[All Customers].[USA].[CA].[San Francisco],\n"
            + " [Time].[1997])",
            "Axis #0:\n"
            + "{[Customers].[USA].[CA].[San Francisco], [Time].[1997]}\n"
            + "Axis #1:\n"
            + "{[Gender].[All Gender]}\n"
            + "{[Gender].[F]}\n"
            + "{[Gender].[M]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "Row #0: 2\n"
            + "Row #0: \n"
            + "Row #0: 2\n"
            + "Row #1: 4\n"
            + "Row #1: 2\n"
            + "Row #1: 2\n");
    }
    /**
     * Basic test of Order's default hierarchical ASC mode: members are
     * sorted by the measure within their parent, but children stay grouped
     * beneath their parents (e.g. [Dairy] sorts within [Drink]'s children,
     * not against [Food]'s).
     */
    public void testOrder() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,\n"
            + " order({\n"
            + "  [Product].[All Products].[Drink],\n"
            + "  [Product].[All Products].[Drink].[Beverages],\n"
            + "  [Product].[All Products].[Drink].[Dairy],\n"
            + "  [Product].[All Products].[Food],\n"
            + "  [Product].[All Products].[Food].[Baked Goods],\n"
            + "  [Product].[All Products].[Food].[Eggs],\n"
            + "  [Product].[All Products]},\n"
            + " [Measures].[Unit Sales]) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[All Products]}\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Food]}\n"
            + "{[Product].[Food].[Eggs]}\n"
            + "{[Product].[Food].[Baked Goods]}\n"
            + "Row #0: 266,773\n"
            + "Row #1: 24,597\n"
            + "Row #2: 4,186\n"
            + "Row #3: 13,573\n"
            + "Row #4: 191,940\n"
            + "Row #5: 4,132\n"
            + "Row #6: 7,870\n");
    }
    /**
     * Tests hierarchical Order when the members' parents are not in the
     * set: sorting still compares members via their (invisible) ancestors.
     */
    public void testOrderParentsMissing() {
        // Paradoxically, [Alcoholic Beverages] comes before
        // [Eggs] even though it has a larger value, because
        // its parent [Drink] has a smaller value than [Food].
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,"
            + " order({\n"
            + "  [Product].[All Products].[Drink].[Alcoholic Beverages],\n"
            + "  [Product].[All Products].[Food].[Eggs]},\n"
            + " [Measures].[Unit Sales], ASC) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Product].[Food].[Eggs]}\n"
            + "Row #0: 6,838\n"
            + "Row #1: 4,132\n");
    }
    /**
     * Tests Order with the BDESC ("break hierarchy, descending") flag over a
     * crossjoined set: tuples are sorted purely by the measure value,
     * ignoring parent/child grouping.
     */
    public void testOrderCrossJoinBreak() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns,\n"
            + "  Order(\n"
            + "    CrossJoin(\n"
            + "      [Gender].children,\n"
            + "      [Marital Status].children),\n"
            + "    [Measures].[Unit Sales],\n"
            + "    BDESC) on rows\n"
            + "from Sales\n"
            + "where [Time].[1997].[Q1]",
            "Axis #0:\n"
            + "{[Time].[1997].[Q1]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[M]}\n"
            + "{[Gender].[M], [Marital Status].[M]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "Row #0: 17,070\n"
            + "Row #1: 16,790\n"
            + "Row #2: 16,311\n"
            + "Row #3: 16,120\n");
    }
    /**
     * Tests hierarchical ASC Order over a crossjoin: each component of the
     * tuple is compared hierarchically, so invisible ancestors influence
     * the ordering (see the notes below).
     */
    public void testOrderCrossJoin() {
        // Note:
        // 1. [Alcoholic Beverages] collates before [Eggs] and
        //    [Seafood] because its parent, [Drink], is less
        //    than [Food]
        // 2. [Seattle] generally sorts after [CA] and [OR]
        //    because invisible parent [WA] is greater.
        assertQueryReturns(
            "select CrossJoin(\n"
            + "    {[Time].[1997],\n"
            + "     [Time].[1997].[Q1]},\n"
            + "    {[Measures].[Unit Sales]}) on columns,\n"
            + "  Order(\n"
            + "    CrossJoin(\n"
            + "      {[Product].[All Products].[Food].[Eggs],\n"
            + "       [Product].[All Products].[Food].[Seafood],\n"
            + "       [Product].[All Products].[Drink].[Alcoholic Beverages]},\n"
            + "      {[Store].[USA].[WA].[Seattle],\n"
            + "       [Store].[USA].[CA],\n"
            + "       [Store].[USA].[OR]}),\n"
            + "    ([Time].[1997].[Q1], [Measures].[Unit Sales]),\n"
            + "    ASC) on rows\n"
            + "from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997], [Measures].[Unit Sales]}\n"
            + "{[Time].[1997].[Q1], [Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[OR]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[CA]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages], [Store].[USA].[WA].[Seattle]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[CA]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[OR]}\n"
            + "{[Product].[Food].[Seafood], [Store].[USA].[WA].[Seattle]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[CA]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[OR]}\n"
            + "{[Product].[Food].[Eggs], [Store].[USA].[WA].[Seattle]}\n"
            + "Row #0: 1,680\n"
            + "Row #0: 393\n"
            + "Row #1: 1,936\n"
            + "Row #1: 431\n"
            + "Row #2: 635\n"
            + "Row #2: 142\n"
            + "Row #3: 441\n"
            + "Row #3: 91\n"
            + "Row #4: 451\n"
            + "Row #4: 107\n"
            + "Row #5: 217\n"
            + "Row #5: 44\n"
            + "Row #6: 1,116\n"
            + "Row #6: 240\n"
            + "Row #7: 1,119\n"
            + "Row #7: 251\n"
            + "Row #8: 373\n"
            + "Row #8: 57\n");
    }
public void testOrderHierarchicalDesc() {
assertAxisReturns(
"Order(\n"
+ " {[Product].[All Products], "
+ " [Product].[Food],\n"
+ " [Product].[Drink],\n"
+ " [Product].[Non-Consumable],\n"
+ " [Product].[Food].[Eggs],\n"
+ " [Product].[Drink].[Dairy]},\n"
+ " [Measures].[Unit Sales],\n"
+ " DESC)",
"[Product].[All Products]\n"
+ "[Product].[Food]\n"
+ "[Product].[Food].[Eggs]\n"
+ "[Product].[Non-Consumable]\n"
+ "[Product].[Drink]\n"
+ "[Product].[Drink].[Dairy]");
}
    /**
     * Tests hierarchical DESC Order over a crossjoin: [Gender].[M] outranks
     * [F] on the measure, and within each gender the products sort as in
     * {@link #testOrderHierarchicalDesc()}.
     */
    public void testOrderCrossJoinDesc() {
        assertAxisReturns(
            "Order(\n"
            + "  CrossJoin(\n"
            + "    {[Gender].[M], [Gender].[F]},\n"
            + "    {[Product].[All Products], "
            + "     [Product].[Food],\n"
            + "     [Product].[Drink],\n"
            + "     [Product].[Non-Consumable],\n"
            + "     [Product].[Food].[Eggs],\n"
            + "     [Product].[Drink].[Dairy]}),\n"
            + "  [Measures].[Unit Sales],\n"
            + "  DESC)",
            "{[Gender].[M], [Product].[All Products]}\n"
            + "{[Gender].[M], [Product].[Food]}\n"
            + "{[Gender].[M], [Product].[Food].[Eggs]}\n"
            + "{[Gender].[M], [Product].[Non-Consumable]}\n"
            + "{[Gender].[M], [Product].[Drink]}\n"
            + "{[Gender].[M], [Product].[Drink].[Dairy]}\n"
            + "{[Gender].[F], [Product].[All Products]}\n"
            + "{[Gender].[F], [Product].[Food]}\n"
            + "{[Gender].[F], [Product].[Food].[Eggs]}\n"
            + "{[Gender].[F], [Product].[Non-Consumable]}\n"
            + "{[Gender].[F], [Product].[Drink]}\n"
            + "{[Gender].[F], [Product].[Drink].[Dairy]}");
    }
    /**
     * Test case for bug 656802: Order combined with ToggleDrillState must
     * sort the drilled-in children by the measure.
     */
    public void testOrderBug656802() {
        // NOTE(review): the note below appears copy-pasted from
        // testOrderCrossJoin; neither [Alcoholic Beverages] nor [Seattle]
        // occurs in this query - confirm and consider removing.
        // Note:
        // 1. [Alcoholic Beverages] collates before [Eggs] and
        //    [Seafood] because its parent, [Drink], is less
        //    than [Food]
        // 2. [Seattle] generally sorts after [CA] and [OR]
        //    because invisible parent [WA] is greater.
        assertQueryReturns(
            "select {[Measures].[Unit Sales], [Measures].[Store Cost], [Measures].[Store Sales]} ON columns, \n"
            + "Order(\n"
            + "  ToggleDrillState(\n"
            + "    {([Promotion Media].[All Media], [Product].[All Products])},\n"
            + "    {[Product].[All Products]}), \n"
            + "  [Measures].[Unit Sales], DESC) ON rows \n"
            + "from [Sales] where ([Time].[1997])",
            "Axis #0:\n"
            + "{[Time].[1997]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Cost]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Promotion Media].[All Media], [Product].[All Products]}\n"
            + "{[Promotion Media].[All Media], [Product].[Food]}\n"
            + "{[Promotion Media].[All Media], [Product].[Non-Consumable]}\n"
            + "{[Promotion Media].[All Media], [Product].[Drink]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 225,627.23\n"
            + "Row #0: 565,238.13\n"
            + "Row #1: 191,940\n"
            + "Row #1: 163,270.72\n"
            + "Row #1: 409,035.59\n"
            + "Row #2: 50,236\n"
            + "Row #2: 42,879.28\n"
            + "Row #2: 107,366.33\n"
            + "Row #3: 24,597\n"
            + "Row #3: 19,477.23\n"
            + "Row #3: 48,836.21\n");
    }
    /**
     * Simplified test case for bug 712702: ordering year members by
     * [Unit Sales] must place the empty-valued [1998] before [1997].
     */
    public void testOrderBug712702_Simplified() {
        assertQueryReturns(
            "SELECT Order({[Time].[Year].members}, [Measures].[Unit Sales]) on columns\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1998]}\n"
            + "{[Time].[1997]}\n"
            + "Row #0: \n"
            + "Row #0: 266,773\n");
    }
    /**
     * Original test case for bug 712702: NON EMPTY Order over a crossjoin
     * of stores and years, sorted ASC by a calculated average measure,
     * must sort hierarchically within each state.
     */
    public void testOrderBug712702_Original() {
        assertQueryReturns(
            "with member [Measures].[Average Unit Sales] as 'Avg(Descendants([Time].[Time].CurrentMember, [Time].[Month]), \n"
            + "[Measures].[Unit Sales])' \n"
            + "member [Measures].[Max Unit Sales] as 'Max(Descendants([Time].[Time].CurrentMember, [Time].[Month]), [Measures].[Unit Sales])' \n"
            + "select {[Measures].[Average Unit Sales], [Measures].[Max Unit Sales], [Measures].[Unit Sales]} ON columns, \n"
            + "  NON EMPTY Order(\n"
            + "    Crossjoin(\n"
            + "      {[Store].[USA].[OR].[Portland],\n"
            + "       [Store].[USA].[OR].[Salem],\n"
            + "       [Store].[USA].[OR].[Salem].[Store 13],\n"
            + "       [Store].[USA].[CA].[San Francisco],\n"
            + "       [Store].[USA].[CA].[San Diego],\n"
            + "       [Store].[USA].[CA].[Beverly Hills],\n"
            + "       [Store].[USA].[CA].[Los Angeles],\n"
            + "       [Store].[USA].[WA].[Walla Walla],\n"
            + "       [Store].[USA].[WA].[Bellingham],\n"
            + "       [Store].[USA].[WA].[Yakima],\n"
            + "       [Store].[USA].[WA].[Spokane],\n"
            + "       [Store].[USA].[WA].[Seattle], \n"
            + "       [Store].[USA].[WA].[Bremerton],\n"
            + "       [Store].[USA].[WA].[Tacoma]},\n"
            + "     [Time].[Year].Members), \n"
            + "  [Measures].[Average Unit Sales], ASC) ON rows\n"
            + "from [Sales] ",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Average Unit Sales]}\n"
            + "{[Measures].[Max Unit Sales]}\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[OR].[Portland], [Time].[1997]}\n"
            + "{[Store].[USA].[OR].[Salem], [Time].[1997]}\n"
            + "{[Store].[USA].[OR].[Salem].[Store 13], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Time].[1997]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Walla Walla], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Bellingham], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Yakima], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Spokane], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Bremerton], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Seattle], [Time].[1997]}\n"
            + "{[Store].[USA].[WA].[Tacoma], [Time].[1997]}\n"
            + "Row #0: 2,173\n"
            + "Row #0: 2,933\n"
            + "Row #0: 26,079\n"
            + "Row #1: 3,465\n"
            + "Row #1: 5,891\n"
            + "Row #1: 41,580\n"
            + "Row #2: 3,465\n"
            + "Row #2: 5,891\n"
            + "Row #2: 41,580\n"
            + "Row #3: 176\n"
            + "Row #3: 222\n"
            + "Row #3: 2,117\n"
            + "Row #4: 1,778\n"
            + "Row #4: 2,545\n"
            + "Row #4: 21,333\n"
            + "Row #5: 2,136\n"
            + "Row #5: 2,686\n"
            + "Row #5: 25,635\n"
            + "Row #6: 2,139\n"
            + "Row #6: 2,669\n"
            + "Row #6: 25,663\n"
            + "Row #7: 184\n"
            + "Row #7: 301\n"
            + "Row #7: 2,203\n"
            + "Row #8: 186\n"
            + "Row #8: 275\n"
            + "Row #8: 2,237\n"
            + "Row #9: 958\n"
            + "Row #9: 1,163\n"
            + "Row #9: 11,491\n"
            + "Row #10: 1,966\n"
            + "Row #10: 2,634\n"
            + "Row #10: 23,591\n"
            + "Row #11: 2,048\n"
            + "Row #11: 2,623\n"
            + "Row #11: 24,576\n"
            + "Row #12: 2,084\n"
            + "Row #12: 2,304\n"
            + "Row #12: 25,011\n"
            + "Row #13: 2,938\n"
            + "Row #13: 3,818\n"
            + "Row #13: 35,257\n");
    }
public void testOrderEmpty() {
assertQueryReturns(
"select \n"
+ " Order("
+ " {},"
+ " [Customers].currentMember, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n");
}
public void testOrderOne() {
assertQueryReturns(
"select \n"
+ " Order("
+ " {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young]},"
+ " [Customers].currentMember, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
+ "Row #0: 75\n");
}
public void testOrderKeyEmpty() {
assertQueryReturns(
"select \n"
+ " Order("
+ " {},"
+ " [Customers].currentMember.OrderKey, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n");
}
public void testOrderKeyOne() {
assertQueryReturns(
"select \n"
+ " Order("
+ " {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young]},"
+ " [Customers].currentMember.OrderKey, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
+ "Row #0: 75\n");
}
    /**
     * Tests hierarchical DESC Order sorting by member name: [Drink]'s
     * children come out in reverse alphabetical order beneath their parent.
     * Based on olap4j's OlapTest.testSortDimension.
     */
    public void testOrderDesc() {
        // based on olap4j's OlapTest.testSortDimension
        assertQueryReturns(
            "SELECT\n"
            + "{[Measures].[Store Sales]} ON COLUMNS,\n"
            + "{Order(\n"
            + "  {{[Product].[Drink], [Product].[Drink].Children}},\n"
            + "  [Product].CurrentMember.Name,\n"
            + "  DESC)} ON ROWS\n"
            + "FROM [Sales]\n"
            + "WHERE {[Time].[1997].[Q3].[7]}",
            "Axis #0:\n"
            + "{[Time].[1997].[Q3].[7]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[Drink]}\n"
            + "{[Product].[Drink].[Dairy]}\n"
            + "{[Product].[Drink].[Beverages]}\n"
            + "{[Product].[Drink].[Alcoholic Beverages]}\n"
            + "Row #0: 4,409.58\n"
            + "Row #1: 629.69\n"
            + "Row #2: 2,477.02\n"
            + "Row #3: 1,302.87\n");
    }
public void testOrderMemberMemberValueExpNew() {
propSaver.set(
MondrianProperties.instance().CompareSiblingsByOrderKey,
true);
// Use a fresh connection to make sure bad member ordinals haven't
// been assigned by previous tests.
final TestContext context = getTestContext().withFreshConnection();
try {
context.assertQueryReturns(
"select \n"
+ " Order("
+ " {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
+ " [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
+ " [Customers].currentMember.OrderKey, BDESC) \n"
+ "on 0 from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
+ "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
+ "Row #0: 33\n"
+ "Row #0: 75\n");
} finally {
if (context != null) {
context.close();
}
}
}
    /**
     * Tests Order by the member itself (i.e. by the default measure) with
     * CompareSiblingsByOrderKey enabled: BDESC on [Unit Sales] puts
     * [Abel Young] (75) before [Adeline Chun] (33).
     */
    public void testOrderMemberMemberValueExpNew1() {
        // sort by default measure
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "select \n"
                + "  Order("
                + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + "    [Customers].currentMember, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    /**
     * Tests that Order with no direction flag defaults to ASC when sorting
     * by OrderKey.
     *
     * <p>NOTE(review): the calculated member [Measures].[Zero] is declared
     * but never referenced by the query - confirm whether it is needed.
     */
    public void testOrderMemberDefaultFlag1() {
        // flags not specified default to ASC - sort by default measure
        assertQueryReturns(
            "with \n"
            + "  Member [Measures].[Zero] as '0' \n"
            + "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Customers].currentMember.OrderKey) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    /**
     * Tests that Order with no direction flag defaults to ASC when sorting
     * by an explicit measure ([Store Cost]).
     *
     * <p>NOTE(review): the calculated member [Measures].[Zero] is declared
     * but never referenced by the query - confirm whether it is needed.
     */
    public void testOrderMemberDefaultFlag2() {
        // flags not specified default to ASC
        assertQueryReturns(
            "with \n"
            + "  Member [Measures].[Zero] as '0' \n"
            + "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Measures].[Store Cost]) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests hierarchical DESC Order by OrderKey when the members' cities
     * have no order keys of their own: members fall back to key order.
     */
    public void testOrderMemberMemberValueExpHierarchy() {
        // Santa Monica and Woodland Hills both don't have orderkey
        // members are sorted by the order of their keys
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Customers].currentMember.OrderKey, DESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests Order with two sort keys: primary key [Unit Sales] BDESC, with
     * ties broken by the member's OrderKey BDESC.
     */
    public void testOrderMemberMultiKeysMemberValueExp1() {
        // sort by unit sales and then customer id (Adeline = 6442, Abe = 570)
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Measures].[Unit Sales], BDESC, [Customers].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests Order with two OrderKey sort keys at different hierarchy
     * depths (state ascending, then customer descending), with
     * CompareSiblingsByOrderKey enabled.
     */
    public void testOrderMemberMultiKeysMemberValueExp2() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "select \n"
                + "  Order("
                + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + "    [Customers].currentMember.Parent.Parent.OrderKey, BASC, [Customers].currentMember.OrderKey, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    /**
     * Tests Order when the secondary sort key ([Time] current member) does
     * not vary across the sorted set: the tie between Abe and Adeline must
     * keep their original relative order.
     */
    public void testOrderMemberMultiKeysMemberValueExpDepends() {
        // should preserve order of Abe and Adeline (note second key is [Time])
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Measures].[Unit Sales], BDESC, [Time].[Time].currentMember, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests Order over tuples (from a NonEmptyCrossJoin named set) sorted
     * by the customer's OrderKey BDESC, with CompareSiblingsByOrderKey
     * enabled.
     */
    public void testOrderTupleSingleKeysNew() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "with \n"
                + "  set [NECJ] as \n"
                + "    'NonEmptyCrossJoin( \n"
                + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + "    {[Store].[USA].[WA].[Seattle],\n"
                + "     [Store].[USA].[CA],\n"
                + "     [Store].[USA].[OR]})'\n"
                + "select \n"
                + " Order([NECJ], [Customers].currentMember.OrderKey, BDESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel], [Store].[USA].[WA].[Seattle]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            context.close();
        }
    }
    /**
     * As the previous test, but sorts the tuple set by the [Store] member's
     * OrderKey with hierarchical DESC (not BDESC), again with
     * CompareSiblingsByOrderKey enabled.
     */
    public void testOrderTupleSingleKeysNew1() {
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        final TestContext context = getTestContext().withFreshConnection();
        try {
            context.assertQueryReturns(
                "with \n"
                + " set [NECJ] as \n"
                + "   'NonEmptyCrossJoin( \n"
                + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
                + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
                + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
                + "    {[Store].[USA].[WA].[Seattle],\n"
                + "     [Store].[USA].[CA],\n"
                + "     [Store].[USA].[OR]})'\n"
                + "select \n"
                + " Order([NECJ], [Store].currentMember.OrderKey, DESC) \n"
                + "on 0 from [Sales]",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel], [Store].[USA].[WA].[Seattle]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young], [Store].[USA].[CA]}\n"
                + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun], [Store].[USA].[CA]}\n"
                + "Row #0: 33\n"
                + "Row #0: 75\n"
                + "Row #0: 33\n");
        } finally {
            // Always release the dedicated connection opened above.
            context.close();
        }
    }
    /**
     * Tests Order over a tuple set with two keys: the [Store] member's
     * OrderKey (BDESC), then [Measures].[Unit Sales] (BDESC).
     */
    public void testOrderTupleMultiKeys1() {
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + "   'NonEmptyCrossJoin( \n"
            + "    {[Store].[USA].[CA],\n"
            + "     [Store].[USA].[WA]},\n"
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Store].currentMember.OrderKey, BDESC, [Measures].[Unit Sales], BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests Order over a tuple set keyed first by [Unit Sales] (BDESC), then
     * by the OrderKey of the customer's ancestor at the [Name] level (BDESC).
     */
    public void testOrderTupleMultiKeys2() {
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + "   'NonEmptyCrossJoin( \n"
            + "    {[Store].[USA].[CA],\n"
            + "     [Store].[USA].[WA]},\n"
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Measures].[Unit Sales], BDESC, Ancestor([Customers].currentMember, [Customers].[Name]).OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests Order with a hierarchical DESC first key ([Unit Sales]) and a
     * member-valued second key (the customer's [Name]-level ancestor, BDESC).
     */
    public void testOrderTupleMultiKeys3() {
        // WA unit sales is greater than CA unit sales
        // Santa Monica unit sales (2660) is greater that Woodland hills (2516)
        assertQueryReturns(
            "with \n"
            + " set [NECJ] as \n"
            + "   'NonEmptyCrossJoin( \n"
            + "    {[Store].[USA].[CA],\n"
            + "     [Store].[USA].[WA]},\n"
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + " Order([NECJ], [Measures].[Unit Sales], DESC, Ancestor([Customers].currentMember, [Customers].[Name]), BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Store].[USA].[CA], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    /**
     * Tests Order by OrderKey over a virtual cube joining dimensions from
     * the Sales and HR cubes; the cube is deliberately nonsensical (all
     * cells empty), the point is only the ordering of the row axis.
     */
    public void testOrderTupleMultiKeyswithVCube() {
        // WA unit sales is greater than CA unit sales
        propSaver.set(
            MondrianProperties.instance().CompareSiblingsByOrderKey, true);
        // Use a fresh connection to make sure bad member ordinals haven't
        // been assigned by previous tests.
        // a non-sense cube just to test ordering by order key
        TestContext context = TestContext.instance().create(
            null,
            null,
            "<VirtualCube name=\"Sales vs HR\">\n"
            + "<VirtualCubeDimension cubeName=\"Sales\" name=\"Customers\"/>\n"
            + "<VirtualCubeDimension cubeName=\"HR\" name=\"Position\"/>\n"
            + "<VirtualCubeMeasure cubeName=\"HR\" name=\"[Measures].[Org Salary]\"/>\n"
            + "</VirtualCube>",
            null, null, null);
        context.assertQueryReturns(
            "with \n"
            + " set [CJ] as \n"
            + "   'CrossJoin( \n"
            + "    {[Position].[Store Management].children},\n"
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]})' \n"
            + "select \n"
            + "  [Measures].[Org Salary] on columns, \n"
            + "  Order([CJ], [Position].currentMember.OrderKey, BASC, Ancestor([Customers].currentMember, [Customers].[Name]).OrderKey, BDESC) \n"
            + "on rows \n"
            + "from [Sales vs HR]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Org Salary]}\n"
            + "Axis #2:\n"
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Manager], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Assistant Manager], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Position].[Store Management].[Store Shift Supervisor], [Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "Row #0: \n"
            + "Row #1: \n"
            + "Row #2: \n"
            + "Row #3: \n"
            + "Row #4: \n"
            + "Row #5: \n"
            + "Row #6: \n"
            + "Row #7: \n"
            + "Row #8: \n");
    }
    /**
     * Tests Order whose first key ([Customers].[USA].OrderKey) is constant
     * over the set, so sorting is decided by the second key: each member's
     * own OrderKey ascending.
     */
    public void testOrderConstant1() {
        //sort by customerId (Abel = 7851, Adeline = 6442, Abe = 570)
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Customers].[USA].OrderKey, BDESC, [Customers].currentMember.OrderKey, BASC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "Row #0: 33\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n");
    }
    /**
     * Tests Order whose keys come from hierarchies ([Product], [Gender]) not
     * present in the set being sorted; both keys evaluate to the same value
     * for every tuple, so the input order is preserved.
     *
     * <p>NOTE(review): method name has a typo ("Diffrent" for "Different");
     * left as-is in case external suites reference it by name.
     */
    public void testOrderDiffrentDim() {
        assertQueryReturns(
            "select \n"
            + "  Order("
            + "    {[Customers].[USA].[WA].[Issaquah].[Abe Tramel],"
            + "     [Customers].[All Customers].[USA].[CA].[Woodland Hills].[Abel Young],"
            + "     [Customers].[All Customers].[USA].[CA].[Santa Monica].[Adeline Chun]},"
            + "    [Product].currentMember.OrderKey, BDESC, [Gender].currentMember.OrderKey, BDESC) \n"
            + "on 0 from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[WA].[Issaquah].[Abe Tramel]}\n"
            + "{[Customers].[USA].[CA].[Woodland Hills].[Abel Young]}\n"
            + "{[Customers].[USA].[CA].[Santa Monica].[Adeline Chun]}\n"
            + "Row #0: 33\n"
            + "Row #0: 75\n"
            + "Row #0: 33\n");
    }
    /**
     * Tests the Unorder function: it returns the set unchanged (natural or
     * previously-established order), accepts a single member (implicit set
     * conversion), and rejects non-set arguments with a signature error.
     */
    public void testUnorder() {
        assertAxisReturns(
            "Unorder([Gender].members)",
            "[Gender].[All Gender]\n"
            + "[Gender].[F]\n"
            + "[Gender].[M]");
        // Unorder does not undo an explicit Order; the sorted order remains.
        assertAxisReturns(
            "Unorder(Order([Gender].members, -[Measures].[Unit Sales]))",
            "[Gender].[All Gender]\n"
            + "[Gender].[M]\n"
            + "[Gender].[F]");
        assertAxisReturns(
            "Unorder(Crossjoin([Gender].members, [Marital Status].Children))",
            "{[Gender].[All Gender], [Marital Status].[M]}\n"
            + "{[Gender].[All Gender], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[M]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}\n"
            + "{[Gender].[M], [Marital Status].[S]}");
        // implicitly convert member to set
        assertAxisReturns(
            "Unorder([Gender].[M])",
            "[Gender].[M]");
        assertAxisThrows(
            "Unorder(1 + 3)",
            "No function matches signature 'Unorder(<Numeric Expression>)'");
        assertAxisThrows(
            "Unorder([Gender].[M], 1 + 3)",
            "No function matches signature 'Unorder(<Member>, <Numeric Expression>)'");
        assertQueryReturns(
            "select {[Measures].[Store Sales], [Measures].[Unit Sales]} on 0,\n"
            + "  Unorder([Gender].Members) on 1\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[All Gender]}\n"
            + "{[Gender].[F]}\n"
            + "{[Gender].[M]}\n"
            + "Row #0: 565,238.13\n"
            + "Row #0: 266,773\n"
            + "Row #1: 280,226.21\n"
            + "Row #1: 131,558\n"
            + "Row #2: 285,011.92\n"
            + "Row #2: 135,215\n");
    }
public void testSiblingsA() {
assertAxisReturns(
"{[Time].[1997].Siblings}",
"[Time].[1997]\n"
+ "[Time].[1998]");
}
public void testSiblingsB() {
assertAxisReturns(
"{[Store].Siblings}",
"[Store].[All Stores]");
}
public void testSiblingsC() {
assertAxisReturns(
"{[Store].[USA].[CA].Siblings}",
"[Store].[USA].[CA]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA].[WA]");
}
public void testSiblingsD() {
// The null member has no siblings -- not even itself
assertAxisReturns("{[Gender].Parent.Siblings}", "");
assertExprReturns(
"count ([Gender].parent.siblings, includeempty)", "0");
}
public void testSubset() {
assertAxisReturns(
"Subset([Promotion Media].Children, 7, 2)",
"[Promotion Media].[Product Attachment]\n"
+ "[Promotion Media].[Radio]");
}
public void testSubsetNegativeCount() {
assertAxisReturns(
"Subset([Promotion Media].Children, 3, -1)",
"");
}
public void testSubsetNegativeStart() {
assertAxisReturns(
"Subset([Promotion Media].Children, -2, 4)",
"");
}
public void testSubsetDefault() {
assertAxisReturns(
"Subset([Promotion Media].Children, 11)",
"[Promotion Media].[Sunday Paper, Radio]\n"
+ "[Promotion Media].[Sunday Paper, Radio, TV]\n"
+ "[Promotion Media].[TV]");
}
public void testSubsetOvershoot() {
assertAxisReturns(
"Subset([Promotion Media].Children, 15)",
"");
}
public void testSubsetEmpty() {
assertAxisReturns(
"Subset([Gender].[F].Children, 1)",
"");
assertAxisReturns(
"Subset([Gender].[F].Children, 1, 3)",
"");
}
public void testTail() {
assertAxisReturns(
"Tail([Store].Children, 2)",
"[Store].[Mexico]\n"
+ "[Store].[USA]");
}
public void testTailNegative() {
assertAxisReturns(
"Tail([Store].Children, 2 - 3)",
"");
}
public void testTailDefault() {
assertAxisReturns(
"Tail([Store].Children)",
"[Store].[USA]");
}
public void testTailOvershoot() {
assertAxisReturns(
"Tail([Store].Children, 2 + 2)",
"[Store].[Canada]\n"
+ "[Store].[Mexico]\n"
+ "[Store].[USA]");
}
public void testTailEmpty() {
assertAxisReturns(
"Tail([Gender].[F].Children, 2)",
"");
assertAxisReturns(
"Tail([Gender].[F].Children)",
"");
}
    /**
     * Tests ToggleDrillState: drilled members in the second set are expanded
     * in place ([USA] gains its children) while others are left collapsed.
     */
    public void testToggleDrillState() {
        assertAxisReturns(
            "ToggleDrillState({[Customers].[USA],[Customers].[Canada]},"
            + "{[Customers].[USA],[Customers].[USA].[CA]})",
            "[Customers].[USA]\n"
            + "[Customers].[USA].[CA]\n"
            + "[Customers].[USA].[OR]\n"
            + "[Customers].[USA].[WA]\n"
            + "[Customers].[Canada]");
    }
    /**
     * Tests ToggleDrillState over all Product Department members, drilling
     * only [Food].[Snack Foods]; its single child appears directly after it.
     */
    public void testToggleDrillState2() {
        assertAxisReturns(
            "ToggleDrillState([Product].[Product Department].members, "
            + "{[Product].[All Products].[Food].[Snack Foods]})",
            "[Product].[Drink].[Alcoholic Beverages]\n"
            + "[Product].[Drink].[Beverages]\n"
            + "[Product].[Drink].[Dairy]\n"
            + "[Product].[Food].[Baked Goods]\n"
            + "[Product].[Food].[Baking Goods]\n"
            + "[Product].[Food].[Breakfast Foods]\n"
            + "[Product].[Food].[Canned Foods]\n"
            + "[Product].[Food].[Canned Products]\n"
            + "[Product].[Food].[Dairy]\n"
            + "[Product].[Food].[Deli]\n"
            + "[Product].[Food].[Eggs]\n"
            + "[Product].[Food].[Frozen Foods]\n"
            + "[Product].[Food].[Meat]\n"
            + "[Product].[Food].[Produce]\n"
            + "[Product].[Food].[Seafood]\n"
            + "[Product].[Food].[Snack Foods]\n"
            + "[Product].[Food].[Snack Foods].[Snack Foods]\n"
            + "[Product].[Food].[Snacks]\n"
            + "[Product].[Food].[Starchy Foods]\n"
            + "[Product].[Non-Consumable].[Carousel]\n"
            + "[Product].[Non-Consumable].[Checkout]\n"
            + "[Product].[Non-Consumable].[Health and Hygiene]\n"
            + "[Product].[Non-Consumable].[Household]\n"
            + "[Product].[Non-Consumable].[Periodicals]");
    }
    /**
     * Tests ToggleDrillState collapsing: [Q2] is already drilled (its
     * children [4] and [6] are in the set), so toggling removes them.
     */
    public void testToggleDrillState3() {
        assertAxisReturns(
            "ToggleDrillState("
            + "{[Time].[1997].[Q1],"
            + " [Time].[1997].[Q2],"
            + " [Time].[1997].[Q2].[4],"
            + " [Time].[1997].[Q2].[6],"
            + " [Time].[1997].[Q3]},"
            + "{[Time].[1997].[Q2]})",
            "[Time].[1997].[Q1]\n"
            + "[Time].[1997].[Q2]\n"
            + "[Time].[1997].[Q3]");
    }
    // bug 634860
    /**
     * Tests ToggleDrillState on a tuple set: drilling [Store].[USA].[CA]
     * expands the Store component of the matching tuple while the other
     * tuple components are carried through unchanged.
     */
    public void testToggleDrillStateTuple() {
        assertAxisReturns(
            "ToggleDrillState(\n"
            + "{([Store].[USA].[CA],"
            + "  [Product].[All Products].[Drink].[Alcoholic Beverages]),\n"
            + " ([Store].[USA],"
            + "  [Product].[All Products].[Drink])},\n"
            + "{[Store].[All stores].[USA].[CA]})",
            "{[Store].[USA].[CA], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Alameda], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Beverly Hills], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[Los Angeles], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[San Diego], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA].[CA].[San Francisco], [Product].[Drink].[Alcoholic Beverages]}\n"
            + "{[Store].[USA], [Product].[Drink]}");
    }
    /**
     * Tests that the RECURSIVE flag of ToggleDrillState is rejected with an
     * explicit error (it is not supported by this implementation).
     */
    public void testToggleDrillStateRecursive() {
        // We expect this to fail.
        assertQueryThrows(
            "Select \n"
            + "    ToggleDrillState(\n"
            + "        {[Store].[USA]}, \n"
            + "        {[Store].[USA]}, recursive) on Axis(0) \n"
            + "from [Sales]\n",
            "'RECURSIVE' is not supported in ToggleDrillState.");
    }
public void testTopCount() {
assertAxisReturns(
"TopCount({[Promotion Media].[Media Type].members}, 2, [Measures].[Unit Sales])",
"[Promotion Media].[No Media]\n"
+ "[Promotion Media].[Daily Paper, Radio, TV]");
}
public void testTopCountTuple() {
assertAxisReturns(
"TopCount([Customers].[Name].members,2,(Time.[1997].[Q1],[Measures].[Store Sales]))",
"[Customers].[USA].[WA].[Spokane].[Grace McLaughlin]\n"
+ "[Customers].[USA].[WA].[Spokane].[Matt Bellah]");
}
public void testTopCountEmpty() {
assertAxisReturns(
"TopCount(Filter({[Promotion Media].[Media Type].members}, 1=0), 2, [Measures].[Unit Sales])",
"");
}
public void testTopCountDepends() {
checkTopBottomCountPercentDepends("TopCount");
checkTopBottomCountPercentDepends("TopPercent");
checkTopBottomCountPercentDepends("TopSum");
checkTopBottomCountPercentDepends("BottomCount");
checkTopBottomCountPercentDepends("BottomPercent");
checkTopBottomCountPercentDepends("BottomSum");
}
    /**
     * Asserts the hierarchy-dependency set of one Top/Bottom function: with
     * a measure sort key it depends on all hierarchies except [Measures] and
     * [Promotion Media]; the 2-argument *Count form depends on nothing.
     *
     * @param fun function name, e.g. "TopCount" or "BottomPercent"
     */
    private void checkTopBottomCountPercentDepends(String fun) {
        String s1 =
            TestContext.allHiersExcept("[Measures]", "[Promotion Media]");
        getTestContext().assertSetExprDependsOn(
            fun
            + "({[Promotion Media].[Media Type].members}, "
            + "2, [Measures].[Unit Sales])",
            s1);
        // Only the *Count functions have a 2-argument overload.
        if (fun.endsWith("Count")) {
            getTestContext().assertSetExprDependsOn(
                fun + "({[Promotion Media].[Media Type].members}, 2)",
                "{}");
        }
    }
    /**
     * Tests TopCount applied to a large result set.
     *
     * <p>Before optimizing (see FunUtil.partialSort), on a 2-core 32-bit 2.4GHz
     * machine, the 1st query took 14.5 secs, the 2nd query took 5.0 secs.
     * After optimizing, who knows?
     */
    public void testTopCountHuge() {
        // TODO convert printfs to trace
        final String query =
            "SELECT [Measures].[Store Sales] ON 0,\n"
            + "TopCount([Time].[Month].members * "
            + "[Customers].[Name].members, 3, [Measures].[Store Sales]) ON 1\n"
            + "FROM [Sales]";
        final String desiredResult =
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1].[3], [Customers].[USA].[WA].[Spokane].[George Todero]}\n"
            + "{[Time].[1997].[Q3].[7], [Customers].[USA].[WA].[Spokane].[James Horvat]}\n"
            + "{[Time].[1997].[Q4].[11], [Customers].[USA].[WA].[Olympia].[Charles Stanley]}\n"
            + "Row #0: 234.83\n"
            + "Row #1: 199.46\n"
            + "Row #2: 191.90\n";
        // Run twice and log elapsed times: the second run exercises caches.
        long now = System.currentTimeMillis();
        assertQueryReturns(query, desiredResult);
        LOGGER.info("first query took " + (System.currentTimeMillis() - now));
        now = System.currentTimeMillis();
        assertQueryReturns(query, desiredResult);
        LOGGER.info("second query took " + (System.currentTimeMillis() - now));
    }
public void testTopPercent() {
assertAxisReturns(
"TopPercent({[Promotion Media].[Media Type].members}, 70, [Measures].[Unit Sales])",
"[Promotion Media].[No Media]");
}
//todo: test precision
public void testTopSum() {
assertAxisReturns(
"TopSum({[Promotion Media].[Media Type].members}, 200000, [Measures].[Unit Sales])",
"[Promotion Media].[No Media]\n"
+ "[Promotion Media].[Daily Paper, Radio, TV]");
}
public void testTopSumEmpty() {
assertAxisReturns(
"TopSum(Filter({[Promotion Media].[Media Type].members}, 1=0), "
+ "200000, [Measures].[Unit Sales])",
"");
}
public void testUnionAll() {
assertAxisReturns(
"Union({[Gender].[M]}, {[Gender].[F]}, ALL)",
"[Gender].[M]\n"
+ "[Gender].[F]"); // order is preserved
}
    /**
     * Tests Union ALL on tuple sets: the two crossjoined sets are simply
     * concatenated; regression for a bug where rows were duplicated.
     */
    public void testUnionAllTuple() {
        // With the bug, the last 8 rows are repeated.
        assertQueryReturns(
            "with \n"
            + "set [Set1] as 'Crossjoin({[Time].[1997].[Q1]:[Time].[1997].[Q4]},{[Store].[USA].[CA]:[Store].[USA].[OR]})'\n"
            + "set [Set2] as 'Crossjoin({[Time].[1997].[Q2]:[Time].[1997].[Q3]},{[Store].[Mexico].[DF]:[Store].[Mexico].[Veracruz]})'\n"
            + "select \n"
            + "{[Measures].[Unit Sales]} ON COLUMNS,\n"
            + "Union([Set1], [Set2], ALL) ON ROWS\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997].[Q1], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q1], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q2], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q2], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q3], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q3], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q4], [Store].[USA].[CA]}\n"
            + "{[Time].[1997].[Q4], [Store].[USA].[OR]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[DF]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Guerrero]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Jalisco]}\n"
            + "{[Time].[1997].[Q2], [Store].[Mexico].[Veracruz]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[DF]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Guerrero]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Jalisco]}\n"
            + "{[Time].[1997].[Q3], [Store].[Mexico].[Veracruz]}\n"
            + "Row #0: 16,890\n"
            + "Row #1: 19,287\n"
            + "Row #2: 18,052\n"
            + "Row #3: 15,079\n"
            + "Row #4: 18,370\n"
            + "Row #5: 16,940\n"
            + "Row #6: 21,436\n"
            + "Row #7: 16,353\n"
            + "Row #8: \n"
            + "Row #9: \n"
            + "Row #10: \n"
            + "Row #11: \n"
            + "Row #12: \n"
            + "Row #13: \n"
            + "Row #14: \n"
            + "Row #15: \n");
    }
public void testUnion() {
assertAxisReturns(
"Union({[Store].[USA], [Store].[USA], [Store].[USA].[OR]}, "
+ "{[Store].[USA].[CA], [Store].[USA]})",
"[Store].[USA]\n"
+ "[Store].[USA].[OR]\n"
+ "[Store].[USA].[CA]");
}
public void testUnionEmptyBoth() {
assertAxisReturns(
"Union({}, {})",
"");
}
public void testUnionEmptyRight() {
assertAxisReturns(
"Union({[Gender].[M]}, {})",
"[Gender].[M]");
}
    /**
     * Tests Union on tuple sets: the duplicate tuple ([M], [S]) appears only
     * once in the result (default behavior is DISTINCT).
     */
    public void testUnionTuple() {
        assertAxisReturns(
            "Union({"
            + " ([Gender].[M], [Marital Status].[S]),"
            + " ([Gender].[F], [Marital Status].[S])"
            + "}, {"
            + " ([Gender].[M], [Marital Status].[M]),"
            + " ([Gender].[M], [Marital Status].[S])"
            + "})",
            "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}");
    }
    /**
     * Tests Union with an explicit DISTINCT flag on tuple sets; the result
     * matches the default (no-flag) behavior of the previous test.
     */
    public void testUnionTupleDistinct() {
        assertAxisReturns(
            "Union({"
            + " ([Gender].[M], [Marital Status].[S]),"
            + " ([Gender].[F], [Marital Status].[S])"
            + "}, {"
            + " ([Gender].[M], [Marital Status].[M]),"
            + " ([Gender].[M], [Marital Status].[S])"
            + "}, Distinct)",
            "{[Gender].[M], [Marital Status].[S]}\n"
            + "{[Gender].[F], [Marital Status].[S]}\n"
            + "{[Gender].[M], [Marital Status].[M]}");
    }
    /**
     * Tests a Hierarchize(Union(...)) of two crossjoins in a full query;
     * only the resulting row-axis position count (45) is asserted.
     */
    public void testUnionQuery() {
        Result result = executeQuery(
            "select {[Measures].[Unit Sales], "
            + "[Measures].[Store Cost], "
            + "[Measures].[Store Sales]} on columns,\n"
            + " Hierarchize(\n"
            + "   Union(\n"
            + "     Crossjoin(\n"
            + "       Crossjoin([Gender].[All Gender].children,\n"
            + "                 [Marital Status].[All Marital Status].children),\n"
            + "       Crossjoin([Customers].[All Customers].children,\n"
            + "                 [Product].[All Products].children) ),\n"
            + "     Crossjoin({([Gender].[All Gender].[M], [Marital Status].[All Marital Status].[M])},\n"
            + "       Crossjoin(\n"
            + "         [Customers].[All Customers].[USA].children,\n"
            + "         [Product].[All Products].children) ) )) on rows\n"
            + "from Sales where ([Time].[1997])");
        final Axis rowsAxis = result.getAxes()[1];
        Assert.assertEquals(45, rowsAxis.getPositions().size());
    }
    /**
     * Tests Item on a member set: chained Item(1).Item(0) yields a member,
     * and out-of-range indexes (too large or negative) yield the null member.
     */
    public void testItemMember() {
        assertExprReturns(
            "Descendants([Time].[1997], [Time].[Month]).Item(1).Item(0).UniqueName",
            "[Time].[1997].[Q1].[2]");

        // Access beyond the list yields the Null member.
        if (isDefaultNullMemberRepresentation()) {
            assertExprReturns(
                "[Time].[1997].Children.Item(6).UniqueName", "[Time].[#null]");
            assertExprReturns(
                "[Time].[1997].Children.Item(-1).UniqueName", "[Time].[#null]");
        }
    }
public void testItemTuple() {
assertExprReturns(
"CrossJoin([Gender].[All Gender].children, "
+ "[Time].[1997].[Q2].children).Item(0).Item(1).UniqueName",
"[Time].[1997].[Q2].[4]");
}
public void testStrToMember() {
assertExprReturns(
"StrToMember(\"[Time].[1997].[Q2].[4]\").Name",
"4");
}
public void testStrToMemberUniqueName() {
assertExprReturns(
"StrToMember(\"[Store].[USA].[CA]\").Name",
"CA");
}
public void testStrToMemberFullyQualifiedName() {
assertExprReturns(
"StrToMember(\"[Store].[All Stores].[USA].[CA]\").Name",
"CA");
}
    /**
     * Tests that StrToMember, StrToSet and StrToTuple all reject a null
     * string argument with the same error SSAS 2005 gives.
     */
    public void testStrToMemberNull() {
        // SSAS 2005 gives "#Error An MDX expression was expected. An empty
        // expression was specified."
        assertExprThrows(
            "StrToMember(null).Name",
            "An MDX expression was expected. An empty expression was specified");
        assertExprThrows(
            "StrToSet(null, [Gender]).Count",
            "An MDX expression was expected. An empty expression was specified");
        assertExprThrows(
            "StrToTuple(null, [Gender]).Name",
            "An MDX expression was expected. An empty expression was specified");
    }
    /**
     * Testcase for
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-560">
     * bug MONDRIAN-560, "StrToMember function doesn't use IgnoreInvalidMembers
     * option"</a>.
     */
    public void testStrToMemberIgnoreInvalidMembers() {
        final MondrianProperties properties = MondrianProperties.instance();

        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);
        // [Product].[Drugs] is invalid, becomes null member, and is dropped
        // from list
        assertQueryReturns(
            "select \n"
            + "  {[Product].[Food],\n"
            + "    StrToMember(\"[Product].[Drugs]\")} on columns,\n"
            + "  {[Measures].[Unit Sales]} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[Food]}\n"
            + "Axis #2:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Row #0: 191,940\n");

        // Hierarchy is inferred from leading edge
        assertExprReturns(
            "StrToMember(\"[Marital Status].[Separated]\").Hierarchy.Name",
            "Marital Status");

        // Null member is returned
        assertExprReturns(
            "StrToMember(\"[Marital Status].[Separated]\").Name",
            "#null");

        // Use longest valid prefix, so get [Time].[Weekly] rather than just
        // [Time].
        final String timeWeekly = TestContext.hierarchyName("Time", "Weekly");
        assertExprReturns(
            "StrToMember(\"" + timeWeekly
            + ".[1996].[Q1]\").Hierarchy.UniqueName",
            timeWeekly);

        // If hierarchy is invalid, throw an error even though
        // IgnoreInvalidMembersDuringQuery is set.
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy].[Invalid].[Member]\").Name",
            "MDX object '[Unknown Hierarchy].[Invalid].[Member]' not found in cube 'Sales'");
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy].[Invalid]\").Name",
            "MDX object '[Unknown Hierarchy].[Invalid]' not found in cube 'Sales'");
        assertExprThrows(
            "StrToMember(\"[Unknown Hierarchy]\").Name",
            "MDX object '[Unknown Hierarchy]' not found in cube 'Sales'");
        assertAxisThrows(
            "StrToMember(\"\")",
            "MDX object '' not found in cube 'Sales'");

        // With the property off, an invalid member is an error, not a null.
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, false);
        assertQueryThrows(
            "select \n"
            + "  {[Product].[Food],\n"
            + "    StrToMember(\"[Product].[Drugs]\")} on columns,\n"
            + "  {[Measures].[Unit Sales]} on rows\n"
            + "from [Sales]",
            "Member '[Product].[Drugs]' not found");
        assertExprThrows(
            "StrToMember(\"[Marital Status].[Separated]\").Hierarchy.Name",
            "Member '[Marital Status].[Separated]' not found");
    }
    /**
     * Tests StrToTuple: a single hierarchy yields a member, multiple
     * hierarchies yield a tuple.
     */
    public void testStrToTuple() {
        // single dimension yields member
        assertAxisReturns(
            "{StrToTuple(\"[Time].[1997].[Q2]\", [Time])}",
            "[Time].[1997].[Q2]");

        // multiple dimensions yield tuple
        assertAxisReturns(
            "{StrToTuple(\"([Gender].[F], [Time].[1997].[Q2])\", [Gender], [Time])}",
            "{[Gender].[F], [Time].[1997].[Q2]}");

        // todo: test for garbage at end of string
    }
    /**
     * Tests StrToTuple with IgnoreInvalidMembersDuringQuery enabled: if any
     * member of the tuple is invalid, the whole tuple becomes null.
     */
    public void testStrToTupleIgnoreInvalidMembers() {
        final MondrianProperties properties = MondrianProperties.instance();
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);

        // If any member is invalid, the whole tuple is null.
        assertAxisReturns(
            "StrToTuple(\"([Gender].[M], [Marital Status].[Separated])\","
            + " [Gender], [Marital Status])",
            "");
    }
    /**
     * Tests that StrToTuple rejects a tuple with two members from the same
     * hierarchy. (NOTE(review): "Du" in the method name looks like a typo
     * for "Dup"; left as-is to avoid breaking suites that list names.)
     */
    public void testStrToTupleDuHierarchiesFails() {
        assertAxisThrows(
            "{StrToTuple(\"([Gender].[F], [Time].[1997].[Q2], [Gender].[M])\", [Gender], [Time], [Gender])}",
            "Tuple contains more than one member of hierarchy '[Gender]'.");
    }
    /**
     * Tests that the duplicate-hierarchy check still fires when the tuple
     * also contains two distinct hierarchies of the same dimension
     * ([Time] and [Time].[Weekly]), which are legal together.
     */
    public void testStrToTupleDupHierInSameDimensions() {
        assertAxisThrows(
            "{StrToTuple("
            + "\"([Gender].[F], "
            + "[Time].[1997].[Q2], "
            + "[Time].[Weekly].[1997].[10])\","
            + " [Gender], "
            + TestContext.hierarchyName("Time", "Weekly")
            + ", [Gender])}",
            "Tuple contains more than one member of hierarchy '[Gender]'.");
    }
    /**
     * Tests the dependency set of StrToTuple: as a member/tuple expression
     * it depends on nothing; converted to a scalar it depends on every
     * hierarchy not fixed by the tuple itself.
     */
    public void testStrToTupleDepends() {
        getTestContext().assertMemberExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2]\", [Time])",
            "{}");

        // converted to scalar, depends set is larger
        getTestContext().assertExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2]\", [Time])",
            TestContext.allHiersExcept("[Time]"));

        getTestContext().assertMemberExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2], [Gender].[F]\", [Time], [Gender])",
            "{}");

        getTestContext().assertExprDependsOn(
            "StrToTuple(\"[Time].[1997].[Q2], [Gender].[F]\", [Time], [Gender])",
            TestContext.allHiersExcept("[Time]", "[Gender]"));
    }
    /**
     * Tests StrToSet parsing: member sets, hierarchy mismatch errors,
     * tolerance of whitespace, tuple sets, and matching by unique name
     * (with or without the all member, bracketed or not).
     */
    public void testStrToSet() {
        // TODO: handle text after '}'
        // TODO: handle string which ends too soon
        // TODO: handle spaces before first '{'
        // TODO: test spaces before unbracketed names,
        //       e.g. "{Gender. M, Gender. F }".

        assertAxisReturns(
            "StrToSet("
            + " \"{[Gender].[F], [Gender].[M]}\","
            + " [Gender])",
            "[Gender].[F]\n"
            + "[Gender].[M]");

        assertAxisThrows(
            "StrToSet("
            + " \"{[Gender].[F], [Time].[1997]}\","
            + " [Gender])",
            "member is of wrong hierarchy");

        // whitespace ok
        assertAxisReturns(
            "StrToSet("
            + " \"  {   [Gender] . [F]  ,[Gender].[M] }  \","
            + " [Gender])",
            "[Gender].[F]\n"
            + "[Gender].[M]");

        // tuples
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " ([Gender].[F], [Time].[1997].[Q2]), "
            + " ([Gender].[M], [Time].[1997])"
            + "}"
            + "\","
            + " [Gender],"
            + " [Time])",
            "{[Gender].[F], [Time].[1997].[Q2]}\n"
            + "{[Gender].[M], [Time].[1997]}");

        // matches unique name
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " [Store].[USA].[CA], "
            + " [Store].[All Stores].[USA].OR,"
            + " [Store].[All Stores]. [USA] . [WA]"
            + "}"
            + "\","
            + " [Store])",
            "[Store].[USA].[CA]\n"
            + "[Store].[USA].[OR]\n"
            + "[Store].[USA].[WA]");
    }
    /**
     * Tests that StrToSet rejects tuples containing two members of the same
     * hierarchy, mirroring the StrToTuple duplicate-hierarchy check.
     */
    public void testStrToSetDupDimensionsFails() {
        assertAxisThrows(
            "StrToSet("
            + "\""
            + "{"
            + " ([Gender].[F], [Time].[1997].[Q2], [Gender].[F]), "
            + " ([Gender].[M], [Time].[1997], [Gender].[F])"
            + "}"
            + "\","
            + " [Gender],"
            + " [Time],"
            + " [Gender])",
            "Tuple contains more than one member of hierarchy '[Gender]'.");
    }
    /**
     * Tests StrToSet with IgnoreInvalidMembersDuringQuery enabled: invalid
     * members (and tuples containing them) are silently dropped.
     */
    public void testStrToSetIgnoreInvalidMembers() {
        final MondrianProperties properties = MondrianProperties.instance();
        propSaver.set(properties.IgnoreInvalidMembersDuringQuery, true);
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " [Product].[Food],"
            + " [Product].[Food].[You wouldn't like],"
            + " [Product].[Drink].[You would like],"
            + " [Product].[Drink].[Dairy]"
            + "}"
            + "\","
            + " [Product])",
            "[Product].[Food]\n"
            + "[Product].[Drink].[Dairy]");
        assertAxisReturns(
            "StrToSet("
            + "\""
            + "{"
            + " ([Gender].[M], [Product].[Food]),"
            + " ([Gender].[F], [Product].[Food].[You wouldn't like]),"
            + " ([Gender].[M], [Product].[Drink].[You would like]),"
            + " ([Gender].[F], [Product].[Drink].[Dairy])"
            + "}"
            + "\","
            + " [Gender], [Product])",
            "{[Gender].[M], [Product].[Food]}\n"
            + "{[Gender].[F], [Product].[Drink].[Dairy]}");
    }
    /**
     * Tests Ytd: zero-argument form uses the current Time member, the
     * one-argument form accumulates from the start of the year, non-Time
     * arguments are rejected, and dependencies are checked.
     */
    public void testYtd() {
        assertAxisReturns(
            "Ytd()",
            "[Time].[1997]");
        assertAxisReturns(
            "Ytd([Time].[1997].[Q3])",
            "[Time].[1997].[Q1]\n"
            + "[Time].[1997].[Q2]\n"
            + "[Time].[1997].[Q3]");
        assertAxisReturns(
            "Ytd([Time].[1997].[Q2].[4])",
            "[Time].[1997].[Q1].[1]\n"
            + "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[3]\n"
            + "[Time].[1997].[Q2].[4]");
        assertAxisThrows(
            "Ytd([Store])",
            "Argument to function 'Ytd' must belong to Time hierarchy");
        // Zero-arg form depends on the Time hierarchies; one-arg form does not.
        getTestContext().assertSetExprDependsOn(
            "Ytd()",
            "{[Time], " + TimeWeekly + "}");
        getTestContext().assertSetExprDependsOn(
            "Ytd([Time].[1997].[Q2])",
            "{}");
    }
    /**
     * Testcase for
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-458">
     * bug MONDRIAN-458, "error deducing type of Ytd/Qtd/Mtd functions within
     * Generate"</a>.
     */
    public void testGeneratePlusXtd() {
        // Default Generate removes duplicates across iterations.
        assertAxisReturns(
            "generate(\n"
            + "  {[Time].[1997].[Q1].[2], [Time].[1997].[Q3].[7]},\n"
            + " {Ytd( [Time].[Time].currentMember)})",
            "[Time].[1997].[Q1].[1]\n"
            + "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[3]\n"
            + "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]");
        // Generate with ALL keeps duplicates.
        assertAxisReturns(
            "generate(\n"
            + "  {[Time].[1997].[Q1].[2], [Time].[1997].[Q3].[7]},\n"
            + " {Ytd( [Time].[Time].currentMember)}, ALL)",
            "[Time].[1997].[Q1].[1]\n"
            + "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[1]\n"
            + "[Time].[1997].[Q1].[2]\n"
            + "[Time].[1997].[Q1].[3]\n"
            + "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]\n"
            + "[Time].[1997].[Q2].[6]\n"
            + "[Time].[1997].[Q3].[7]");
        assertExprReturns(
            "count(generate({[Time].[1997].[Q4].[11]},"
            + " {Qtd( [Time].[Time].currentMember)}))",
            2, 0);
        assertExprReturns(
            "count(generate({[Time].[1997].[Q4].[11]},"
            + " {Mtd( [Time].[Time].currentMember)}))",
            1, 0);
    }
    /**
     * Tests Qtd: zero-argument form uses the slicer's Time member; with a
     * member argument it accumulates from the start of the quarter, is the
     * identity for a quarter, and empty for a year; non-Time args fail.
     */
    public void testQtd() {
        // zero args
        assertQueryReturns(
            "with member [Measures].[Foo] as ' SetToStr(Qtd()) '\n"
            + "select {[Measures].[Foo]} on columns\n"
            + "from [Sales]\n"
            + "where [Time].[1997].[Q2].[5]",
            "Axis #0:\n"
            + "{[Time].[1997].[Q2].[5]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Foo]}\n"
            + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");

        // one arg, a month
        assertAxisReturns(
            "Qtd([Time].[1997].[Q2].[5])",
            "[Time].[1997].[Q2].[4]\n"
            + "[Time].[1997].[Q2].[5]");

        // one arg, a quarter
        assertAxisReturns(
            "Qtd([Time].[1997].[Q2])",
            "[Time].[1997].[Q2]");

        // one arg, a year
        assertAxisReturns(
            "Qtd([Time].[1997])",
            "");

        assertAxisThrows(
            "Qtd([Store])",
            "Argument to function 'Qtd' must belong to Time hierarchy");
    }
/**
 * Tests the <code>Mtd</code> (month-to-date) MDX function with zero and
 * one argument, including its Time-hierarchy type check.
 */
public void testMtd() {
    // zero args: uses the slicer's current Time member (May 1997)
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(Mtd()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[5]}\n");
    // one arg, a month: degenerates to the month itself
    assertAxisReturns(
        "Mtd([Time].[1997].[Q2].[5])",
        "[Time].[1997].[Q2].[5]");
    // one arg, a quarter: above Month level, so the result is empty
    assertAxisReturns(
        "Mtd([Time].[1997].[Q2])",
        "");
    // one arg, a year: also above Month level, so empty
    assertAxisReturns(
        "Mtd([Time].[1997])",
        "");
    // non-Time argument is a type error
    assertAxisThrows(
        "Mtd([Store])",
        "Argument to function 'Mtd' must belong to Time hierarchy");
}
/**
 * Tests the <code>PeriodsToDate</code> MDX function: dependency analysis,
 * the two-, one- and zero-argument forms, evaluation at the top level,
 * a regression case for bug 1598379, and use on a non-Time hierarchy.
 */
public void testPeriodsToDate() {
    // Dependency analysis: with no member argument the expression depends
    // on the [Time] hierarchy's current member; with an explicit member,
    // it depends on nothing.
    getTestContext().assertSetExprDependsOn("PeriodsToDate()", "{[Time]}");
    getTestContext().assertSetExprDependsOn(
        "PeriodsToDate([Time].[Year])",
        "{[Time]}");
    getTestContext().assertSetExprDependsOn(
        "PeriodsToDate([Time].[Year], [Time].[1997].[Q2].[5])", "{}");
    // two args
    assertAxisReturns(
        "PeriodsToDate([Time].[Quarter], [Time].[1997].[Q2].[5])",
        "[Time].[1997].[Q2].[4]\n" + "[Time].[1997].[Q2].[5]");
    // equivalent to above
    assertAxisReturns(
        "TopCount("
        + "  Descendants("
        + "    Ancestor("
        + "      [Time].[1997].[Q2].[5], [Time].[Quarter]),"
        + "    [Time].[1997].[Q2].[5].Level),"
        + "  1).Item(0) : [Time].[1997].[Q2].[5]",
        "[Time].[1997].[Q2].[4]\n" + "[Time].[1997].[Q2].[5]");
    // one arg
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate([Time].[Quarter])) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");
    // zero args
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997].[Q2].[5]",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {[Time].[1997].[Q2].[4], [Time].[1997].[Q2].[5]}\n");
    // zero args, evaluated at a member which is at the top level.
    // The default level is the level above the current member -- so
    // choosing a member at the highest level might trip up the
    // implementation.
    assertQueryReturns(
        "with member [Measures].[Foo] as ' SetToStr(PeriodsToDate()) '\n"
        + "select {[Measures].[Foo]} on columns\n"
        + "from [Sales]\n"
        + "where [Time].[1997]",
        "Axis #0:\n"
        + "{[Time].[1997]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Foo]}\n"
        + "Row #0: {}\n");
    // Testcase for bug 1598379, which caused NPE because the args[0].type
    // knew its dimension but not its hierarchy.
    assertQueryReturns(
        "with member [Measures].[Position] as\n"
        + " 'Sum("
        + "PeriodsToDate([Time].[Time].Levels(0),"
        + " [Time].[Time].CurrentMember), "
        + "[Measures].[Store Sales])'\n"
        + "select {[Time].[1997],\n"
        + " [Time].[1997].[Q1],\n"
        + " [Time].[1997].[Q1].[1],\n"
        + " [Time].[1997].[Q1].[2],\n"
        + " [Time].[1997].[Q1].[3]} ON COLUMNS,\n"
        + "{[Measures].[Store Sales], [Measures].[Position] } ON ROWS\n"
        + "from [Sales]",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997]}\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Store Sales]}\n"
        + "{[Measures].[Position]}\n"
        + "Row #0: 565,238.13\n"
        + "Row #0: 139,628.35\n"
        + "Row #0: 45,539.69\n"
        + "Row #0: 44,058.79\n"
        + "Row #0: 50,029.87\n"
        + "Row #1: 565,238.13\n"
        + "Row #1: 139,628.35\n"
        + "Row #1: 45,539.69\n"
        + "Row #1: 89,598.48\n"
        + "Row #1: 139,628.35\n");
    // PeriodsToDate also works on non-Time hierarchies, e.g. Product.
    assertQueryReturns(
        "select\n"
        + "{[Measures].[Unit Sales]} on columns,\n"
        + "periodstodate(\n"
        + "  [Product].[Product Category],\n"
        + "  [Product].[Food].[Baked Goods].[Bread].[Muffins]) on rows\n"
        + "from [Sales]\n"
        + "",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 815\n"
        + "Row #1: 3,497\n"
        + "");
    // TODO: enable
    if (false) {
        assertExprThrows(
            "Sum(PeriodsToDate([Time.Weekly].[Year], [Time].CurrentMember), [Measures].[Unit Sales])",
            "wrong dimension");
    }
}
/**
 * Tests the <code>SetToStr</code> MDX function applied to a set of
 * members and to a set of tuples.
 */
public void testSetToStr() {
    assertExprReturns(
        "SetToStr([Time].[Time].children)",
        "{[Time].[1997].[Q1], [Time].[1997].[Q2], [Time].[1997].[Q3], [Time].[1997].[Q4]}");
    // Now, applied to tuples
    assertExprReturns(
        "SetToStr({CrossJoin([Marital Status].children, {[Gender].[M]})})",
        "{"
        + "([Marital Status].[M], [Gender].[M]), "
        + "([Marital Status].[S], [Gender].[M])"
        + "}");
}
/**
 * Tests the <code>TupleToStr</code> MDX function applied to dimensions,
 * hierarchies, members, tuples, and null members.
 */
public void testTupleToStr() {
    // Applied to a dimension (which becomes a member)
    assertExprReturns(
        "TupleToStr([Product])",
        "[Product].[All Products]");
    // Applied to a dimension (invalid because has no default hierarchy)
    if (MondrianProperties.instance().SsasCompatibleNaming.get()) {
        assertExprThrows(
            "TupleToStr([Time])",
            "The 'Time' dimension contains more than one hierarchy, "
            + "therefore the hierarchy must be explicitly specified.");
    } else {
        assertExprReturns(
            "TupleToStr([Time])",
            "[Time].[1997]");
    }
    // Applied to a hierarchy
    assertExprReturns(
        "TupleToStr([Time].[Time])",
        "[Time].[1997]");
    // Applied to a member
    assertExprReturns(
        "TupleToStr([Store].[USA].[OR])",
        "[Store].[USA].[OR]");
    // Applied to a member (extra set of parens)
    assertExprReturns(
        "TupleToStr(([Store].[USA].[OR]))",
        "[Store].[USA].[OR]");
    // Now, applied to a tuple
    assertExprReturns(
        "TupleToStr(([Marital Status], [Gender].[M]))",
        "([Marital Status].[All Marital Status], [Gender].[M])");
    // Applied to a tuple containing a null member
    assertExprReturns(
        "TupleToStr(([Marital Status], [Gender].Parent))",
        "");
    // Applied to a null member
    assertExprReturns(
        "TupleToStr([Marital Status].Parent)",
        "");
}
/**
 * Evaluates a scalar MDX expression and asserts that its string result
 * equals {@code expected}. For example,
 * <code>assertExprReturns("1 + 2", "3")</code> should succeed.
 *
 * @param expr MDX scalar expression
 * @param expected Expected string rendering of the result
 */
public void assertExprReturns(String expr, String expected) {
    assertEquals(expected, executeExpr(expr));
}
/**
 * Executes a scalar expression, and asserts that the result is within
 * delta of the expected result.
 *
 * <p>If the expression does not evaluate to a number (including when it
 * evaluates to null), fails with a {@link ComparisonFailure} describing
 * the actual value.
 *
 * @param expr MDX scalar expression
 * @param expected Expected value
 * @param delta Maximum allowed deviation from expected value
 */
public void assertExprReturns(
    String expr, double expected, double delta)
{
    Object value = getTestContext().executeExprRaw(expr).getValue();
    // Check the type explicitly rather than catching ClassCastException:
    // the old catch-based version let a null value escape as a raw NPE
    // instead of producing this diagnostic failure.
    if (!(value instanceof Number)) {
        String msg = "Actual value \"" + value + "\" is not a number.";
        throw new ComparisonFailure(
            msg, Double.toString(expected), String.valueOf(value));
    }
    double actual = ((Number) value).doubleValue();
    // Treat NaN == NaN as a match; JUnit's delta comparison would not.
    if (Double.isNaN(expected) && Double.isNaN(actual)) {
        return;
    }
    Assert.assertEquals(
        null,
        expected,
        actual,
        delta);
}
/**
 * Compiles a scalar expression and asserts that the generated program
 * matches the expected form.
 *
 * @param expr MDX scalar expression
 * @param expectedCalc Expected textual form of the compiled program
 */
public void assertExprCompilesTo(
    String expr,
    String expectedCalc)
{
    final String actualCalc =
        getTestContext().compileExpression(expr, true);
    // When dependency testing is active the compiled code carries extra
    // 'DependencyTestingCalc' wrappers, making this comparison useless.
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        return;
    }
    TestContext.assertEqualsVerbose(expectedCalc, actualCalc);
}
/**
 * Compiles a set (axis) expression and asserts that the generated program
 * matches the expected form.
 *
 * @param expr MDX set expression
 * @param expectedCalc Expected textual form of the compiled program
 */
public void assertAxisCompilesTo(
    String expr,
    String expectedCalc)
{
    final String actualCalc =
        getTestContext().compileExpression(expr, false);
    // When dependency testing is active the compiled code carries extra
    // 'DependencyTestingCalc' wrappers, making this comparison useless.
    if (MondrianProperties.instance().TestExpDependencies.get() > 0) {
        return;
    }
    TestContext.assertEqualsVerbose(expectedCalc, actualCalc);
}
/**
 * Tests the <code>Rank(member, set)</code> MDX function.
 */
public void testRank() {
    // Member within set
    assertExprReturns(
        "Rank([Store].[USA].[CA], "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "2");
    // Member not in set
    assertExprReturns(
        "Rank([Store].[USA].[WA], "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "0");
    // Member not in empty set
    assertExprReturns(
        "Rank([Store].[USA].[WA], {})", "0");
    // Null member not in set returns null.
    assertExprReturns(
        "Rank([Store].Parent, "
        + "{[Store].[USA].[OR],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA]})", "");
    // Null member in empty set. (MSAS returns an error "Formula error -
    // dimension count is not valid - in the Rank function" but I think
    // null is the correct behavior.)
    assertExprReturns(
        "Rank([Gender].Parent, {})", "");
    // Member occurs twice in set -- pick first
    assertExprReturns(
        "Rank([Store].[USA].[WA], \n"
        + "{[Store].[USA].[WA],"
        + " [Store].[USA].[CA],"
        + " [Store].[USA],"
        + " [Store].[USA].[WA]})", "1");
    // Tuple not in set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[M])})", "0");
    // Tuple in set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[F], [Marital Status].[M])})", "3");
    // Tuple not in empty set
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].[M]), \n" + "{})", "0");
    // Partially null tuple in set, returns null
    assertExprReturns(
        "Rank(([Gender].[F], [Marital Status].Parent), \n"
        + "{([Gender].[F], [Marital Status].[S]),\n"
        + " ([Gender].[M], [Marital Status].[S]),\n"
        + " ([Gender].[F], [Marital Status].[M])})", "");
}
/**
 * Tests <code>Rank</code> used with an ordering expression, comparing the
 * 2-arg form over an explicitly ordered set against the 3-arg form.
 */
public void testRankWithExpr() {
    // Note that [Good] and [Top Measure] have the same [Unit Sales]
    // value (5), but [Good] ranks 1 and [Top Measure] ranks 2. Even though
    // they are sorted descending on unit sales, they remain in their
    // natural order (member name) because MDX sorts are stable.
    assertQueryReturns(
        "with member [Measures].[Sibling Rank] as ' Rank([Product].CurrentMember, [Product].CurrentMember.Siblings) '\n"
        + "  member [Measures].[Sales Rank] as ' Rank([Product].CurrentMember, Order([Product].Parent.Children, [Measures].[Unit Sales], DESC)) '\n"
        + "  member [Measures].[Sales Rank2] as ' Rank([Product].CurrentMember, [Product].Parent.Children, [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank], [Measures].[Sales Rank2]} on columns,\n"
        + " {[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children} on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "{[Measures].[Sales Rank2]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Pearl]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #0: 1\n"
        + "Row #1: \n"
        + "Row #1: 5\n"
        + "Row #1: 5\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #3: 5\n"
        + "Row #3: 2\n"
        + "Row #3: 1\n"
        + "Row #4: 3\n"
        + "Row #4: 4\n"
        + "Row #4: 3\n");
}
/**
 * Tests the 3-arg <code>Rank</code> over members: tied measure values
 * receive the same (tied) rank, unlike the 2-arg form.
 */
public void testRankMembersWithTiedExpr() {
    assertQueryReturns(
        "with "
        + " Set [Beers] as {[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children} "
        + "  member [Measures].[Sales Rank] as ' Rank([Product].CurrentMember, [Beers], [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank]} on columns,\n"
        + " Generate([Beers], {[Product].CurrentMember}) on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Store].[USA].[OR].[Portland].[Store 11], [Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Pearl]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #1: \n"
        + "Row #1: 5\n"
        + "Row #2: 3\n"
        + "Row #2: 3\n"
        + "Row #3: 5\n"
        + "Row #3: 1\n"
        + "Row #4: 3\n"
        + "Row #4: 3\n");
}
/**
 * Tests the 3-arg <code>Rank</code> over tuples: tied measure values
 * receive the same (tied) rank.
 */
public void testRankTuplesWithTiedExpr() {
    assertQueryReturns(
        "with "
        + " Set [Beers for Store] as 'NonEmptyCrossJoin("
        + "[Product].[All Products].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].children, "
        + "{[Store].[USA].[OR].[Portland].[Store 11]})' "
        + "  member [Measures].[Sales Rank] as ' Rank(([Product].CurrentMember,[Store].CurrentMember), [Beers for Store], [Measures].[Unit Sales]) '\n"
        + "select {[Measures].[Unit Sales], [Measures].[Sales Rank]} on columns,\n"
        + " Generate([Beers for Store], {([Product].CurrentMember, [Store].CurrentMember)}) on rows\n"
        + "from [Sales]\n"
        + "WHERE ([Time].[1997].[Q2].[6])",
        "Axis #0:\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "{[Measures].[Sales Rank]}\n"
        + "Axis #2:\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Good], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Portsmouth], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Top Measure], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "{[Product].[Drink].[Alcoholic Beverages].[Beer and Wine].[Beer].[Walrus], [Store].[USA].[OR].[Portland].[Store 11]}\n"
        + "Row #0: 5\n"
        + "Row #0: 1\n"
        + "Row #1: 3\n"
        + "Row #1: 3\n"
        + "Row #2: 5\n"
        + "Row #2: 1\n"
        + "Row #3: 3\n"
        + "Row #3: 3\n");
}
/**
 * Further tests of the 3-arg <code>Rank</code>: null members, empty sets,
 * members absent from the set, and expressions that evaluate to null.
 */
public void testRankWithExpr2() {
    // Data: Unit Sales
    // All gender 266,733
    // F          131,558
    // M          135,215
    assertExprReturns(
        "Rank([Gender].[All Gender],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "1");
    assertExprReturns(
        "Rank([Gender].[F],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "3");
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "2");
    // Null member. Expression evaluates to null, therefore value does
    // not appear in the list of values, therefore the rank is null.
    assertExprReturns(
        "Rank([Gender].[All Gender].Parent,"
        + " {[Gender].Members},"
        + " [Measures].[Unit Sales])", "");
    // Empty set. Value would appear after all elements in the empty set,
    // therefore rank is 1.
    // Note that SSAS gives error 'The first argument to the Rank function,
    // a tuple expression, should reference the same hierachies as the
    // second argument, a set expression'. I think that's because it can't
    // deduce a type for '{}'. SSAS's problem, not Mondrian's. :)
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {},"
        + " [Measures].[Unit Sales])",
        "1");
    // As above, but SSAS can type-check this.
    assertExprReturns(
        "Rank([Gender].[M],"
        + " Filter(Gender.Members, 1 = 0),"
        + " [Measures].[Unit Sales])",
        "1");
    // Member is not in set
    assertExprReturns(
        "Rank([Gender].[M]," + " {[Gender].[All Gender], [Gender].[F]})",
        "0");
    // Even though M is not in the set, its value lies between [All Gender]
    // and [F].
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].[All Gender], [Gender].[F]},"
        + " [Measures].[Unit Sales])", "2");
    // Expr evaluates to null for some values of set.
    assertExprReturns(
        "Rank([Product].[Non-Consumable].[Household],"
        + " {[Product].[Food], [Product].[All Products], [Product].[Drink].[Dairy]},"
        + " [Product].CurrentMember.Parent)", "2");
    // Expr evaluates to null for all values in the set.
    assertExprReturns(
        "Rank([Gender].[M],"
        + " {[Gender].[All Gender], [Gender].[F]},"
        + " [Marital Status].[All Marital Status].Parent)", "1");
}
/**
 * Tests the 3-arg version of the RANK function with a value
 * which returns null within a set of nulls.
 */
public void testRankWithNulls() {
    // [X] is null everywhere ([Store Sales] never equals 777), so Rank
    // compares a null value against a set of nulls; expected rank is 1.
    assertQueryReturns(
        "with member [Measures].[X] as "
        + "'iif([Measures].[Store Sales]=777,"
        + "[Measures].[Store Sales],Null)'\n"
        + "member [Measures].[Y] as 'Rank([Gender].[M],"
        + "{[Measures].[X],[Measures].[X],[Measures].[X]},"
        + " [Marital Status].[All Marital Status].Parent)'"
        + "select {[Measures].[Y]} on columns from Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Y]}\n"
        + "Row #0: 1\n");
}
/**
 * Tests a RANK function which is so large that we need to use caching
 * in order to execute it efficiently.
 */
public void testRankHuge() {
    // If caching is disabled, don't even try -- it will take too long.
    if (!MondrianProperties.instance().EnableExpCache.get()) {
        return;
    }
    // 2-arg Rank over an explicitly ordered set; spot-checks are done
    // by checkRankHuge with rank3=false.
    checkRankHuge(
        "WITH \n"
        + "  MEMBER [Measures].[Rank among products] \n"
        + "    AS ' Rank([Product].CurrentMember, "
        + "            Order([Product].members, "
        + "            [Measures].[Unit Sales], BDESC)) '\n"
        + "SELECT CrossJoin(\n"
        + "  [Gender].members,\n"
        + "  {[Measures].[Unit Sales],\n"
        + "   [Measures].[Rank among products]}) ON COLUMNS,\n"
        // + "  {[Product], [Product].[All Products].[Non-Consumable].
        // [Periodicals].[Magazines].[Sports Magazines].[Robust].
        // [Robust Monthly Sports Magazine]} ON ROWS\n"
        + "  {[Product].members} ON ROWS\n"
        + "FROM [Sales]",
        false);
}
/**
 * As {@link #testRankHuge()}, but for the 3-argument form of the
 * <code>RANK</code> function.
 *
 * <p>Disabled by jhyde, 2006/2/14. Bug 1431316 logged.
 */
public void _testRank3Huge() {
    // If caching is disabled, don't even try -- it will take too long.
    if (!MondrianProperties.instance().EnableExpCache.get()) {
        return;
    }
    // 3-arg Rank; spot-checks are done by checkRankHuge with rank3=true.
    checkRankHuge(
        "WITH \n"
        + "  MEMBER [Measures].[Rank among products] \n"
        + "    AS ' Rank([Product].CurrentMember, [Product].members, [Measures].[Unit Sales]) '\n"
        + "SELECT CrossJoin(\n"
        + "  [Gender].members,\n"
        + "  {[Measures].[Unit Sales],\n"
        + "   [Measures].[Rank among products]}) ON COLUMNS,\n"
        + "  {[Product],"
        + "   [Product].[All Products].[Non-Consumable].[Periodicals]"
        + ".[Magazines].[Sports Magazines].[Robust]"
        + ".[Robust Monthly Sports Magazine]} ON ROWS\n"
        // + "  {[Product].members} ON ROWS\n"
        + "FROM [Sales]",
        true);
}
/**
 * Runs a large RANK query and spot-checks a handful of cells.
 *
 * <p>Cell coordinates are {columnOrdinal, rowOrdinal}; columns come in
 * pairs of ([Unit Sales], [Rank among products]) per gender member.
 *
 * @param query MDX query whose rows axis holds all Product members
 * @param rank3 whether the query used the 3-arg form of Rank (the tied
 *   ranks differ from the 2-arg form)
 */
private void checkRankHuge(String query, boolean rank3) {
    final Result result = getTestContext().executeQuery(query);
    final Axis rows = result.getAxes()[1];
    final int rowCount = rows.getPositions().size();
    assertEquals(2256, rowCount);
    // [All Products], [All Gender], [Rank]
    assertEquals(
        "1", result.getCell(new int[] {1, 0}).getFormattedValue());
    // The last row should be [Robust Monthly Sports Magazine].
    final Member lastMember = rows.getPositions().get(rowCount - 1).get(0);
    assertEquals("Robust Monthly Sports Magazine", lastMember.getName());
    // [Robust Monthly Sports Magazine], [All Gender], [Rank]
    assertEquals(
        "152",
        result.getCell(new int[] {0, rowCount - 1}).getFormattedValue());
    assertEquals(
        rank3 ? "1,854" : "1,871",
        result.getCell(new int[] {1, rowCount - 1}).getFormattedValue());
    // [Robust Monthly Sports Magazine], [Gender].[F], [Rank]
    assertEquals(
        "90",
        result.getCell(new int[] {2, rowCount - 1}).getFormattedValue());
    assertEquals(
        rank3 ? "1,119" : "1,150",
        result.getCell(new int[] {3, rowCount - 1}).getFormattedValue());
    // [Robust Monthly Sports Magazine], [Gender].[M], [Rank]
    assertEquals(
        "62",
        result.getCell(new int[] {4, rowCount - 1}).getFormattedValue());
    assertEquals(
        rank3 ? "2,131" : "2,147",
        result.getCell(new int[] {5, rowCount - 1}).getFormattedValue());
}
/**
 * Tests <code>LinRegPoint</code> at quarter level: fits a line through
 * quarterly [Store Sales] values and evaluates it at each quarter's rank.
 */
public void testLinRegPointQuarter() {
    assertQueryReturns(
        "WITH MEMBER [Measures].[Test] as \n"
        + "  'LinRegPoint(\n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members),\n"
        + "    Descendants([Time].[1997], [Time].[Quarter]), \n"
        + "[Measures].[Store Sales], \n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members))' \n"
        + "SELECT \n"
        + "{[Measures].[Test],[Measures].[Store Sales]} ON ROWS, \n"
        + "{[Time].[1997].Children} ON COLUMNS \n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1]}\n"
        + "{[Time].[1997].[Q2]}\n"
        + "{[Time].[1997].[Q3]}\n"
        + "{[Time].[1997].[Q4]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Test]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Row #0: 134,299.22\n"
        + "Row #0: 138,972.76\n"
        + "Row #0: 143,646.30\n"
        + "Row #0: 148,319.85\n"
        + "Row #1: 139,628.35\n"
        + "Row #1: 132,666.27\n"
        + "Row #1: 140,271.89\n"
        + "Row #1: 152,671.62\n");
}
/**
 * Tests all of the linear regression functions, as suggested by
 * <a href="http://support.microsoft.com/kb/q307276/">a Microsoft knowledge
 * base article</a>.
 *
 * <p>Disabled (leading underscore): expected values below are a partial
 * sketch ("...") and LastPeriods is emulated with a Lag range.
 */
public void _testLinRegAll() {
    // We have not implemented the LastPeriods function, so we use
    //   [Time].CurrentMember.Lag(9) : [Time].CurrentMember
    // is equivalent to
    //   LastPeriods(10)
    assertQueryReturns(
        "WITH MEMBER \n"
        + "[Measures].[Intercept] AS \n"
        + "  'LinRegIntercept([Time].CurrentMember.Lag(10) : [Time].CurrentMember, [Measures].[Unit Sales], [Measures].[Store Sales])' \n"
        + "MEMBER [Measures].[Regression Slope] AS\n"
        + "  'LinRegSlope([Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales]) '\n"
        + "MEMBER [Measures].[Predict] AS\n"
        + "  'LinRegPoint([Measures].[Unit Sales],[Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales])',\n"
        + "  FORMAT_STRING = 'Standard' \n"
        + "MEMBER [Measures].[Predict Formula] AS\n"
        + "  '([Measures].[Regression Slope] * [Measures].[Unit Sales]) + [Measures].[Intercept]',\n"
        + "  FORMAT_STRING='Standard'\n"
        + "MEMBER [Measures].[Good Fit] AS\n"
        + "  'LinRegR2([Time].CurrentMember.Lag(9) : [Time].CurrentMember, [Measures].[Unit Sales],[Measures].[Store Sales])',\n"
        + "  FORMAT_STRING='#,#.00'\n"
        + "MEMBER [Measures].[Variance] AS\n"
        + "  'LinRegVariance([Time].CurrentMember.Lag(9) : [Time].CurrentMember,[Measures].[Unit Sales],[Measures].[Store Sales])'\n"
        + "SELECT \n"
        + "  {[Measures].[Store Sales], \n"
        + "   [Measures].[Intercept], \n"
        + "   [Measures].[Regression Slope], \n"
        + "   [Measures].[Predict], \n"
        + "   [Measures].[Predict Formula], \n"
        + "   [Measures].[Good Fit], \n"
        + "   [Measures].[Variance] } ON COLUMNS, \n"
        + "  Descendants([Time].[1997], [Time].[Month]) ON ROWS\n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Store Sales]}\n"
        + "{[Measures].[Intercept]}\n"
        + "{[Measures].[Regression Slope]}\n"
        + "{[Measures].[Predict]}\n"
        + "{[Measures].[Predict Formula]}\n"
        + "{[Measures].[Good Fit]}\n"
        + "{[Measures].[Variance]}\n"
        + "Axis #2:\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Row #0: 45,539.69\n"
        + "Row #0: 68711.40\n"
        + "Row #0: -1.033\n"
        + "Row #0: 46,350.26\n"
        + "Row #0: 46.350.26\n"
        + "Row #0: -1.#INF\n"
        + "Row #0: 5.17E-08\n"
        + "...\n"
        + "Row #11: 15343.67\n");
}
/**
 * Tests <code>LinRegPoint</code> at month level: fits a line through
 * monthly [Store Sales] values and evaluates it at each month's rank.
 */
public void testLinRegPointMonth() {
    assertQueryReturns(
        "WITH MEMBER \n"
        + "[Measures].[Test] as \n"
        + "  'LinRegPoint(\n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members),\n"
        + "    Descendants([Time].[1997], [Time].[Month]), \n"
        + "    [Measures].[Store Sales], \n"
        + "    Rank(Time.[Time].CurrentMember, Time.[Time].CurrentMember.Level.Members)\n"
        + " )' \n"
        + "SELECT \n"
        + "  {[Measures].[Test],[Measures].[Store Sales]} ON ROWS, \n"
        + "  Descendants([Time].[1997], [Time].[Month]) ON COLUMNS \n"
        + "FROM Sales",
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Time].[1997].[Q1].[1]}\n"
        + "{[Time].[1997].[Q1].[2]}\n"
        + "{[Time].[1997].[Q1].[3]}\n"
        + "{[Time].[1997].[Q2].[4]}\n"
        + "{[Time].[1997].[Q2].[5]}\n"
        + "{[Time].[1997].[Q2].[6]}\n"
        + "{[Time].[1997].[Q3].[7]}\n"
        + "{[Time].[1997].[Q3].[8]}\n"
        + "{[Time].[1997].[Q3].[9]}\n"
        + "{[Time].[1997].[Q4].[10]}\n"
        + "{[Time].[1997].[Q4].[11]}\n"
        + "{[Time].[1997].[Q4].[12]}\n"
        + "Axis #2:\n"
        + "{[Measures].[Test]}\n"
        + "{[Measures].[Store Sales]}\n"
        + "Row #0: 43,824.36\n"
        + "Row #0: 44,420.51\n"
        + "Row #0: 45,016.66\n"
        + "Row #0: 45,612.81\n"
        + "Row #0: 46,208.95\n"
        + "Row #0: 46,805.10\n"
        + "Row #0: 47,401.25\n"
        + "Row #0: 47,997.40\n"
        + "Row #0: 48,593.55\n"
        + "Row #0: 49,189.70\n"
        + "Row #0: 49,785.85\n"
        + "Row #0: 50,382.00\n"
        + "Row #1: 45,539.69\n"
        + "Row #1: 44,058.79\n"
        + "Row #1: 50,029.87\n"
        + "Row #1: 42,878.25\n"
        + "Row #1: 44,456.29\n"
        + "Row #1: 45,331.73\n"
        + "Row #1: 50,246.88\n"
        + "Row #1: 46,199.04\n"
        + "Row #1: 43,825.97\n"
        + "Row #1: 42,342.27\n"
        + "Row #1: 53,363.71\n"
        + "Row #1: 56,965.64\n");
}
/**
 * Tests the <code>LinRegIntercept</code> MDX function, including
 * constant-expression arguments. Cases that would require MSAS's special
 * "-1.#IND" (missing data) value are kept but disabled.
 */
public void testLinRegIntercept() {
    assertExprReturns(
        "LinRegIntercept([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        -126.65,
        0.50);
    // MSAS emits special IEEE-style markers:
    //   -1.#IND -- missing data
    //   1.#INF  -- division by zero
    //
    // The following table shows query return values from using different
    // FORMAT_STRING's in an expression involving 'division by zero'
    // (tested on Intel platforms):
    //
    // +===========================+=====================+
    // | Format Strings            | Query Return Values |
    // +===========================+=====================+
    // | FORMAT_STRING=''          | 1.#INF              |  (presumably the
    // +===========================+=====================+   empty format;
    // | FORMAT_STRING='Standard'  | 1.#J                |   original text
    // +===========================+=====================+   was garbled)
    // | FORMAT_STRING='Fixed'     | 1.#J                |
    // +===========================+=====================+
    // | FORMAT_STRING='Percent'   | 1#I.NF%             |
    // +===========================+=====================+
    // | FORMAT_STRING='Scientific'| 1.JE+00             |
    // +===========================+=====================+
    //
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegIntercept({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegIntercept([Time].[Month].members,"
            + " 7, [Measures].[Store Sales])",
            "$7.00");
    }
    // format does not add '$'
    assertExprReturns(
        "LinRegIntercept([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        7.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegIntercept([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the <code>LinRegSlope</code> MDX function, including
 * constant-expression arguments. Cases that would require MSAS's
 * "-1.#IND" (missing data) value are kept but disabled.
 */
public void testLinRegSlope() {
    assertExprReturns(
        "LinRegSlope([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        0.4746,
        0.50);
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegSlope({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegSlope([Time].[Month].members,"
            + " 7, [Measures].[Store Sales])",
            "$7.00");
    }
    //     ^^^^
    // copy and paste error: the "$7.00" expectation above looks copied
    // from testLinRegIntercept; a constant y yields slope 0, as asserted
    // in the enabled case below.
    assertExprReturns(
        "LinRegSlope([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        0.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegSlope([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the <code>LinRegPoint</code> MDX function, including
 * constant-expression arguments. Non-parsing and MSAS-specific
 * "-1.#IND" cases are kept but disabled.
 */
public void testLinRegPoint() {
    // NOTE: mdx does not parse
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].CurrentMember[Time].[Month].members,"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "0.4746");
    }
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " {[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // Expected value is wrong
    // zeroth expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint(-1,"
            + " [Time].[Month].members,"
            + " 7, [Measures].[Store Sales])", "-127.124");
    }
    // first expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].[Month].members,"
            + " 7, [Measures].[Store Sales])", "$7.00");
    }
    // format does not add '$'
    assertExprReturns(
        "LinRegPoint([Measures].[Unit Sales],"
        + " [Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        7.00,
        0.01);
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegPoint([Measures].[Unit Sales],"
            + " [Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the <code>LinRegR2</code> MDX function. Disabled (leading
 * underscore): the expected values are questionable and the enabled
 * assertion below expects "$7.00", which looks like a copy-paste from
 * testLinRegIntercept rather than a real R-squared value.
 */
public void _testLinRegR2() {
    // Why would R2 equal the slope
    if (false) {
        assertExprReturns(
            "LinRegR2([Time].[Month].members,"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "0.4746");
    }
    // Mondrian can not return "missing data" value -1.#IND
    // empty set
    if (false) {
        assertExprReturns(
            "LinRegR2({[Time].Parent},"
            + " [Measures].[Unit Sales], [Measures].[Store Sales])",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
    // first expr constant
    assertExprReturns(
        "LinRegR2([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        "$7.00");
    // Mondrian can not return "missing data" value -1.#IND
    // second expr constant
    if (false) {
        assertExprReturns(
            "LinRegR2([Time].[Month].members,"
            + " [Measures].[Unit Sales], 4)",
            "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    }
}
/**
 * Tests the <code>LinRegVariance</code> MDX function. Disabled (leading
 * underscore): the expected values below are placeholders, including
 * MSAS's "-1.#IND" marker which Mondrian cannot produce.
 */
public void _testLinRegVariance() {
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        "0.4746");
    // empty set
    assertExprReturns(
        "LinRegVariance({[Time].Parent},"
        + " [Measures].[Unit Sales], [Measures].[Store Sales])",
        "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
    // first expr constant
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " 7, [Measures].[Store Sales])",
        "$7.00");
    // second expr constant
    assertExprReturns(
        "LinRegVariance([Time].[Month].members,"
        + " [Measures].[Unit Sales], 4)",
        "-1.#IND"); // MSAS returns -1.#IND (whatever that means)
}
/**
 * Tests the basic form of <code>VisualTotals</code> with a caption
 * pattern: the parent total reflects only the displayed children.
 */
public void testVisualTotalsBasic() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + "    {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + "     \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // note that Subtotal - Bread only includes 2 displayed children
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 4,312\n"
        + "Row #1: 815\n"
        + "Row #2: 3,497\n");
}
/**
 * Tests <code>VisualTotals</code> when the same member occurs several
 * times in the input set, with a child between two occurrences.
 */
public void testVisualTotalsConsecutively() {
    assertQueryReturns(
        "select {[Measures].[Unit Sales]} on columns, "
        + "{VisualTotals("
        + "    {[Product].[All Products].[Food].[Baked Goods].[Bread],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels].[Colony],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
        + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
        + "     \"**Subtotal - *\")} on rows "
        + "from [Sales]",
        // Note that [Bagels] occurs 3 times, but only once does it
        // become a subtotal. Note that the subtotal does not include
        // the following [Bagels] member.
        "Axis #0:\n"
        + "{}\n"
        + "Axis #1:\n"
        + "{[Measures].[Unit Sales]}\n"
        + "Axis #2:\n"
        + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[*Subtotal - Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels].[Colony]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
        + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
        + "Row #0: 5,290\n"
        + "Row #1: 815\n"
        + "Row #2: 163\n"
        + "Row #3: 163\n"
        + "Row #4: 815\n"
        + "Row #5: 3,497\n");
}
public void testVisualTotalsNoPattern() {
assertAxisReturns(
"VisualTotals("
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]})",
// Note that the [Bread] visual member is just called [Bread].
"[Product].[Food].[Baked Goods].[Bread]\n"
+ "[Product].[Food].[Baked Goods].[Bread].[Bagels]\n"
+ "[Product].[Food].[Baked Goods].[Bread].[Muffins]");
}
public void testVisualTotalsWithFilter() {
assertQueryReturns(
"select {[Measures].[Unit Sales]} on columns, "
+ "{Filter("
+ " VisualTotals("
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
+ " \"**Subtotal - *\"),"
+ "[Measures].[Unit Sales] > 3400)} on rows "
+ "from [Sales]",
// Note that [*Subtotal - Bread] still contains the
// contribution of [Bagels] 815, which was filtered out.
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
+ "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
+ "Row #0: 4,312\n"
+ "Row #1: 3,497\n");
}
public void testVisualTotalsNested() {
assertQueryReturns(
"select {[Measures].[Unit Sales]} on columns, "
+ "{VisualTotals("
+ " Filter("
+ " VisualTotals("
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
+ " \"**Subtotal - *\"),"
+ " [Measures].[Unit Sales] > 3400),"
+ " \"Second total - *\")} on rows "
+ "from [Sales]",
// Yields the same -- no extra total.
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
+ "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
+ "Row #0: 4,312\n"
+ "Row #1: 3,497\n");
}
public void testVisualTotalsFilterInside() {
assertQueryReturns(
"select {[Measures].[Unit Sales]} on columns, "
+ "{VisualTotals("
+ " Filter("
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
+ " [Measures].[Unit Sales] > 3400),"
+ " \"**Subtotal - *\")} on rows "
+ "from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
+ "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
+ "Row #0: 3,497\n"
+ "Row #1: 3,497\n");
}
public void testVisualTotalsOutOfOrder() {
assertQueryReturns(
"select {[Measures].[Unit Sales]} on columns, "
+ "{VisualTotals("
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread],"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
+ " \"**Subtotal - *\")} on rows "
+ "from [Sales]",
// Note that [*Subtotal - Bread] 3497 does not include 815 for
// bagels.
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
+ "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
+ "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
+ "Row #0: 815\n"
+ "Row #1: 3,497\n"
+ "Row #2: 3,497\n");
}
    /**
     * Checks VisualTotals with ancestors at several levels and out-of-order
     * members: a subtotal sums only the set members listed after it,
     * including grandchildren.
     */
    public void testVisualTotalsGrandparentsAndOutOfOrder() {
        assertQueryReturns(
            "select {[Measures].[Unit Sales]} on columns, "
            + "{VisualTotals("
            + "    {[Product].[All Products].[Food],"
            + "     [Product].[All Products].[Food].[Baked Goods].[Bread],"
            + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],"
            + "     [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods],"
            + "     [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Golden],"
            + "     [Product].[All Products].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Big Time],"
            + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},"
            + "    \"**Subtotal - *\")} on rows "
            + "from [Sales]",
            // Note:
            // [*Subtotal - Food] = 4623 = 815 + 311 + 3497
            // (previous comment said 4513, which did not match the sum or
            // the expected cell value below)
            // [*Subtotal - Bread] = 815, does not include muffins
            // [*Subtotal - Breakfast Foods] = 311 = 110 + 201, includes
            // grandchildren
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Product].[*Subtotal - Food]}\n"
            + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
            + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
            + "{[Product].[Food].[Frozen Foods].[*Subtotal - Breakfast Foods]}\n"
            + "{[Product].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Golden]}\n"
            + "{[Product].[Food].[Frozen Foods].[Breakfast Foods].[Pancake Mix].[Big Time]}\n"
            + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
            + "Row #0: 4,623\n"
            + "Row #1: 815\n"
            + "Row #2: 815\n"
            + "Row #3: 311\n"
            + "Row #4: 110\n"
            + "Row #5: 201\n"
            + "Row #6: 3,497\n");
    }
public void testVisualTotalsCrossjoin() {
assertAxisThrows(
"VisualTotals(Crossjoin([Gender].Members, [Store].children))",
"Argument to 'VisualTotals' function must be a set of members; got set of tuples.");
}
/**
* Test case for bug
* <a href="http://jira.pentaho.com/browse/MONDRIAN-615">MONDRIAN-615</a>,
* "VisualTotals doesn't work for the all member".
*/
public void testVisualTotalsAll() {
final String query =
"SELECT \n"
+ " {[Measures].[Unit Sales]} ON 0, \n"
+ " VisualTotals(\n"
+ " {[Customers].[All Customers],\n"
+ " [Customers].[USA],\n"
+ " [Customers].[USA].[CA],\n"
+ " [Customers].[USA].[OR]}) ON 1\n"
+ "FROM [Sales]";
assertQueryReturns(
query,
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "Axis #2:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[USA]}\n"
+ "{[Customers].[USA].[CA]}\n"
+ "{[Customers].[USA].[OR]}\n"
+ "Row #0: 142,407\n"
+ "Row #1: 142,407\n"
+ "Row #2: 74,748\n"
+ "Row #3: 67,659\n");
// Check captions
final Result result = getTestContext().executeQuery(query);
final List<Position> positionList = result.getAxes()[1].getPositions();
assertEquals("All Customers", positionList.get(0).get(0).getCaption());
assertEquals("USA", positionList.get(1).get(0).getCaption());
assertEquals("CA", positionList.get(2).get(0).getCaption());
}
    /**
     * Test case involving a named set and query pivoted. Suggested in
     * <a href="http://jira.pentaho.com/browse/MONDRIAN-615">MONDRIAN-615</a>,
     * "VisualTotals doesn't work for the all member".
     *
     * <p>Runs the same query twice with the axes swapped; the cell values
     * are expected to transpose accordingly.
     */
    public void testVisualTotalsWithNamedSetAndPivot() {
        assertQueryReturns(
            "WITH SET [CA_OR] AS\n"
            + "    VisualTotals(\n"
            + "        {[Customers].[All Customers],\n"
            + "         [Customers].[USA],\n"
            + "         [Customers].[USA].[CA],\n"
            + "         [Customers].[USA].[OR]})\n"
            + "SELECT \n"
            + "   Drilldownlevel({[Time].[1997]}) ON 0, \n"
            + "   [CA_OR] ON 1 \n"
            + "FROM [Sales] ",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "Axis #2:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "{[Customers].[USA].[OR]}\n"
            + "Row #0: 142,407\n"
            + "Row #0: 36,177\n"
            + "Row #0: 33,131\n"
            + "Row #0: 35,310\n"
            + "Row #0: 37,789\n"
            + "Row #1: 142,407\n"
            + "Row #1: 36,177\n"
            + "Row #1: 33,131\n"
            + "Row #1: 35,310\n"
            + "Row #1: 37,789\n"
            + "Row #2: 74,748\n"
            + "Row #2: 16,890\n"
            + "Row #2: 18,052\n"
            + "Row #2: 18,370\n"
            + "Row #2: 21,436\n"
            + "Row #3: 67,659\n"
            + "Row #3: 19,287\n"
            + "Row #3: 15,079\n"
            + "Row #3: 16,940\n"
            + "Row #3: 16,353\n");
        // same query, swap axes
        assertQueryReturns(
            "WITH SET [CA_OR] AS\n"
            + "    VisualTotals(\n"
            + "        {[Customers].[All Customers],\n"
            + "         [Customers].[USA],\n"
            + "         [Customers].[USA].[CA],\n"
            + "         [Customers].[USA].[OR]})\n"
            + "SELECT \n"
            + "   [CA_OR] ON 0,\n"
            + "   Drilldownlevel({[Time].[1997]}) ON 1\n"
            + "FROM [Sales] ",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "{[Customers].[USA].[OR]}\n"
            + "Axis #2:\n"
            + "{[Time].[1997]}\n"
            + "{[Time].[1997].[Q1]}\n"
            + "{[Time].[1997].[Q2]}\n"
            + "{[Time].[1997].[Q3]}\n"
            + "{[Time].[1997].[Q4]}\n"
            + "Row #0: 142,407\n"
            + "Row #0: 142,407\n"
            + "Row #0: 74,748\n"
            + "Row #0: 67,659\n"
            + "Row #1: 36,177\n"
            + "Row #1: 36,177\n"
            + "Row #1: 16,890\n"
            + "Row #1: 19,287\n"
            + "Row #2: 33,131\n"
            + "Row #2: 33,131\n"
            + "Row #2: 18,052\n"
            + "Row #2: 15,079\n"
            + "Row #3: 35,310\n"
            + "Row #3: 35,310\n"
            + "Row #3: 18,370\n"
            + "Row #3: 16,940\n"
            + "Row #4: 37,789\n"
            + "Row #4: 37,789\n"
            + "Row #4: 21,436\n"
            + "Row #4: 16,353\n");
    }
/**
* Tests that members generated by VisualTotals have correct identity.
*
* <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-295">
* bug MONDRIAN-295, "Query generated by Excel 2007 gives incorrect
* results"</a>.
*/
public void testVisualTotalsIntersect() {
assertQueryReturns(
"WITH\n"
+ "SET [XL_Row_Dim_0] AS 'VisualTotals(Distinct(Hierarchize({Ascendants([Customers].[All Customers].[USA]), Descendants([Customers].[All Customers].[USA])})))' \n"
+ "SELECT \n"
+ "NON EMPTY Hierarchize({[Time].[Year].members}) ON COLUMNS , \n"
+ "NON EMPTY Hierarchize(Intersect({DrilldownLevel({[Customers].[All Customers]})}, [XL_Row_Dim_0])) ON ROWS \n"
+ "FROM [Sales] \n"
+ "WHERE ([Measures].[Store Sales])",
"Axis #0:\n"
+ "{[Measures].[Store Sales]}\n"
+ "Axis #1:\n"
+ "{[Time].[1997]}\n"
+ "Axis #2:\n"
+ "{[Customers].[All Customers]}\n"
+ "{[Customers].[USA]}\n"
+ "Row #0: 565,238.13\n"
+ "Row #1: 565,238.13\n");
}
    /**
     * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-668">
     * bug MONDRIAN-668, "Intersect should return any VisualTotals members in
     * right-hand set"</a>.
     *
     * <p>First checks the plain-member case, then the same intersection with
     * the VisualTotals set embedded in tuples via Crossjoin.
     */
    public void testVisualTotalsWithNamedSetAndPivotSameAxis() {
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA]),\n"
            + "        Descendants([Store].[USA].[CA])})))\n"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      {DrilldownLevel({[Store].[USA]})},\n"
            + "      [XL_Row_Dim_0])) ON COLUMNS\n"
            + "from [Sales] "
            + "where [Measures].[Sales count]\n",
            "Axis #0:\n"
            + "{[Measures].[Sales Count]}\n"
            + "Axis #1:\n"
            + "{[Store].[USA]}\n"
            + "{[Store].[USA].[CA]}\n"
            + "Row #0: 24,442\n"
            + "Row #0: 24,442\n");
        // now with tuples
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA]),\n"
            + "        Descendants([Store].[USA].[CA])})))\n"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      [Marital Status].[M]\n"
            + "      * {DrilldownLevel({[Store].[USA]})}\n"
            + "      * [Gender].[F],\n"
            + "      [Marital Status].[M]\n"
            + "      * [XL_Row_Dim_0]\n"
            + "      * [Gender].[F])) ON COLUMNS\n"
            + "from [Sales] "
            + "where [Measures].[Sales count]\n",
            "Axis #0:\n"
            + "{[Measures].[Sales Count]}\n"
            + "Axis #1:\n"
            + "{[Marital Status].[M], [Store].[USA], [Gender].[F]}\n"
            + "{[Marital Status].[M], [Store].[USA].[CA], [Gender].[F]}\n"
            + "Row #0: 6,054\n"
            + "Row #0: 6,054\n");
    }
    /**
     * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-682">
     * bug MONDRIAN-682, "VisualTotals + Distinct-count measure gives wrong
     * results"</a>.
     *
     * <p>Covers four variants: a distinct-count measure in the slicer, a
     * multi-branch VisualTotals set, the measure on an axis, and the
     * VisualTotals set embedded in tuples.
     */
    public void testVisualTotalsDistinctCountMeasure() {
        // distinct measure
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA]),\n"
            + "        Descendants([Store].[USA].[CA])})))\n"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      {DrilldownLevel({[Store].[All Stores]})},\n"
            + "      [XL_Row_Dim_0])) ON COLUMNS\n"
            + "from [HR] "
            + "where [Measures].[Number of Employees]\n",
            "Axis #0:\n"
            + "{[Measures].[Number of Employees]}\n"
            + "Axis #1:\n"
            + "{[Store].[All Stores]}\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 193\n"
            + "Row #0: 193\n");
        // distinct measure
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA].[Beverly Hills]),\n"
            + "        Descendants([Store].[USA].[CA].[Beverly Hills]),\n"
            + "        Ascendants([Store].[USA].[CA].[Los Angeles]),\n"
            + "        Descendants([Store].[USA].[CA].[Los Angeles])})))"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      {DrilldownLevel({[Store].[All Stores]})},\n"
            + "      [XL_Row_Dim_0])) ON COLUMNS\n"
            + "from [HR] "
            + "where [Measures].[Number of Employees]\n",
            "Axis #0:\n"
            + "{[Measures].[Number of Employees]}\n"
            + "Axis #1:\n"
            + "{[Store].[All Stores]}\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 110\n"
            + "Row #0: 110\n");
        // distinct measure on columns
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA]),\n"
            + "        Descendants([Store].[USA].[CA])})))\n"
            + "select {[Measures].[Count], [Measures].[Number of Employees]} on COLUMNS,"
            + " NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      {DrilldownLevel({[Store].[All Stores]})},\n"
            + "      [XL_Row_Dim_0])) ON ROWS\n"
            + "from [HR] ",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Count]}\n"
            + "{[Measures].[Number of Employees]}\n"
            + "Axis #2:\n"
            + "{[Store].[All Stores]}\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 2,316\n"
            + "Row #0: 193\n"
            + "Row #1: 2,316\n"
            + "Row #1: 193\n");
        // distinct measure with tuples
        assertQueryReturns(
            "WITH SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[CA]),\n"
            + "        Descendants([Store].[USA].[CA])})))\n"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      [Marital Status].[M]\n"
            + "      * {DrilldownLevel({[Store].[USA]})}\n"
            + "      * [Gender].[F],\n"
            + "      [Marital Status].[M]\n"
            + "      * [XL_Row_Dim_0]\n"
            + "      * [Gender].[F])) ON COLUMNS\n"
            + "from [Sales] "
            + "where [Measures].[Customer count]\n",
            "Axis #0:\n"
            + "{[Measures].[Customer Count]}\n"
            + "Axis #1:\n"
            + "{[Marital Status].[M], [Store].[USA], [Gender].[F]}\n"
            + "{[Marital Status].[M], [Store].[USA].[CA], [Gender].[F]}\n"
            + "Row #0: 654\n"
            + "Row #0: 654\n");
    }
    /**
     * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-761">
     * bug MONDRIAN-761, "VisualTotalMember cannot be cast to
     * RolapCubeMember"</a>.
     *
     * <p>Uses an Excel-style query with nested DrilldownMember calls,
     * DIMENSION PROPERTIES and CELL PROPERTIES clauses; the query must
     * execute without a ClassCastException.
     */
    public void testVisualTotalsClassCast() {
        assertQueryReturns(
            "WITH  SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Store].[USA].[WA].[Yakima]), \n"
            + "        Descendants([Store].[USA].[WA].[Yakima]), \n"
            + "        Ascendants([Store].[USA].[WA].[Walla Walla]), \n"
            + "        Descendants([Store].[USA].[WA].[Walla Walla]), \n"
            + "        Ascendants([Store].[USA].[WA].[Tacoma]), \n"
            + "        Descendants([Store].[USA].[WA].[Tacoma]), \n"
            + "        Ascendants([Store].[USA].[WA].[Spokane]), \n"
            + "        Descendants([Store].[USA].[WA].[Spokane]), \n"
            + "        Ascendants([Store].[USA].[WA].[Seattle]), \n"
            + "        Descendants([Store].[USA].[WA].[Seattle]), \n"
            + "        Ascendants([Store].[USA].[WA].[Bremerton]), \n"
            + "        Descendants([Store].[USA].[WA].[Bremerton]), \n"
            + "        Ascendants([Store].[USA].[OR]), \n"
            + "        Descendants([Store].[USA].[OR])}))) \n"
            + " SELECT NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      DrilldownMember(\n"
            + "        {{DrilldownMember(\n"
            + "          {{DrilldownMember(\n"
            + "            {{DrilldownLevel(\n"
            + "              {[Store].[All Stores]})}},\n"
            + "            {[Store].[USA]})}},\n"
            + "          {[Store].[USA].[WA]})}},\n"
            + "        {[Store].[USA].[WA].[Bremerton]}),\n"
            + "      [XL_Row_Dim_0]))\n"
            + "DIMENSION PROPERTIES \n"
            + "  PARENT_UNIQUE_NAME, \n"
            + "  [Store].[Store Name].[Store Type],\n"
            + "  [Store].[Store Name].[Store Manager],\n"
            + "  [Store].[Store Name].[Store Sqft],\n"
            + "  [Store].[Store Name].[Grocery Sqft],\n"
            + "  [Store].[Store Name].[Frozen Sqft],\n"
            + "  [Store].[Store Name].[Meat Sqft],\n"
            + "  [Store].[Store Name].[Has coffee bar],\n"
            + "  [Store].[Store Name].[Street address] ON COLUMNS \n"
            + "FROM [HR]\n"
            + "WHERE \n"
            + "  ([Measures].[Number of Employees])\n"
            + "CELL PROPERTIES\n"
            + "  VALUE,\n"
            + "  FORMAT_STRING,\n"
            + "  LANGUAGE,\n"
            + "  BACK_COLOR,\n"
            + "  FORE_COLOR,\n"
            + "  FONT_FLAGS",
            "Axis #0:\n"
            + "{[Measures].[Number of Employees]}\n"
            + "Axis #1:\n"
            + "{[Store].[All Stores]}\n"
            + "{[Store].[USA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[WA].[Bremerton]}\n"
            + "{[Store].[USA].[WA].[Bremerton].[Store 3]}\n"
            + "{[Store].[USA].[WA].[Seattle]}\n"
            + "{[Store].[USA].[WA].[Spokane]}\n"
            + "{[Store].[USA].[WA].[Tacoma]}\n"
            + "{[Store].[USA].[WA].[Walla Walla]}\n"
            + "{[Store].[USA].[WA].[Yakima]}\n"
            + "Row #0: 419\n"
            + "Row #0: 419\n"
            + "Row #0: 136\n"
            + "Row #0: 283\n"
            + "Row #0: 62\n"
            + "Row #0: 62\n"
            + "Row #0: 62\n"
            + "Row #0: 62\n"
            + "Row #0: 74\n"
            + "Row #0: 4\n"
            + "Row #0: 19\n");
    }
    /**
     * <p>Testcase for <a href="http://jira.pentaho.com/browse/MONDRIAN-678">
     * bug MONDRIAN-678, "VisualTotals gives UnsupportedOperationException
     * calling getOrdinal"</a>. Key difference from previous test is that there
     * are multiple hierarchies in Named set.
     */
    public void testVisualTotalsWithNamedSetOfTuples() {
        assertQueryReturns(
            "WITH  SET [XL_Row_Dim_0] AS\n"
            + " VisualTotals(\n"
            + "   Distinct(\n"
            + "     Hierarchize(\n"
            + "       {Ascendants([Customers].[All Customers].[USA].[CA].[Beverly Hills].[Ari Tweten]),\n"
            + "        Descendants([Customers].[All Customers].[USA].[CA].[Beverly Hills].[Ari Tweten]),\n"
            + "        Ascendants([Customers].[All Customers].[Mexico]),\n"
            + "        Descendants([Customers].[All Customers].[Mexico])})))\n"
            + "select NON EMPTY \n"
            + "  Hierarchize(\n"
            + "    Intersect(\n"
            + "      (DrilldownMember(\n"
            + "        {{DrilldownMember(\n"
            + "          {{DrilldownLevel(\n"
            + "            {[Customers].[All Customers]})}},\n"
            + "          {[Customers].[All Customers].[USA]})}},\n"
            + "        {[Customers].[All Customers].[USA].[CA]})),\n"
            + "      [XL_Row_Dim_0])) ON COLUMNS\n"
            + "from [Sales]\n"
            + "where [Measures].[Sales count]\n",
            "Axis #0:\n"
            + "{[Measures].[Sales Count]}\n"
            + "Axis #1:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "{[Customers].[USA].[CA].[Beverly Hills]}\n"
            + "Row #0: 4\n"
            + "Row #0: 4\n"
            + "Row #0: 4\n"
            + "Row #0: 4\n");
    }
public void testVisualTotalsLevel() {
Result result = getTestContext().executeQuery(
"select {[Measures].[Unit Sales]} on columns,\n"
+ "{[Product].[All Products],\n"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread],\n"
+ " VisualTotals(\n"
+ " {[Product].[All Products].[Food].[Baked Goods].[Bread],\n"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],\n"
+ " [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},\n"
+ " \"**Subtotal - *\")} on rows\n"
+ "from [Sales]");
final List<Position> rowPos = result.getAxes()[1].getPositions();
final Member member0 = rowPos.get(0).get(0);
assertEquals("All Products", member0.getName());
assertEquals("(All)", member0.getLevel().getName());
final Member member1 = rowPos.get(1).get(0);
assertEquals("Bread", member1.getName());
assertEquals("Product Category", member1.getLevel().getName());
final Member member2 = rowPos.get(2).get(0);
assertEquals("*Subtotal - Bread", member2.getName());
assertEquals("Product Category", member2.getLevel().getName());
final Member member3 = rowPos.get(3).get(0);
assertEquals("Bagels", member3.getName());
assertEquals("Product Subcategory", member3.getLevel().getName());
final Member member4 = rowPos.get(4).get(0);
assertEquals("Muffins", member4.getName());
assertEquals("Product Subcategory", member4.getLevel().getName());
}
    /**
     * Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-749">
     * MONDRIAN-749, "Cannot use visual totals members in calculations"</a>.
     *
     * <p>The bug is not currently fixed, so it is a negative test case. Row #2
     * cell #1 contains an exception, but should be "**Subtotal - Bread :
     * Product Subcategory". The expected output below deliberately pins the
     * current (broken) behavior; update it when the bug is fixed.
     */
    public void testVisualTotalsMemberInCalculation() {
        getTestContext().assertQueryReturns(
            "with member [Measures].[Foo] as\n"
            + " [Product].CurrentMember.Name || ' : ' || [Product].Level.Name\n"
            + "select {[Measures].[Unit Sales], [Measures].[Foo]} on columns,\n"
            + "{[Product].[All Products],\n"
            + " [Product].[All Products].[Food].[Baked Goods].[Bread],\n"
            + " VisualTotals(\n"
            + "    {[Product].[All Products].[Food].[Baked Goods].[Bread],\n"
            + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Bagels],\n"
            + "     [Product].[All Products].[Food].[Baked Goods].[Bread].[Muffins]},\n"
            + "    \"**Subtotal - *\")} on rows\n"
            + "from [Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Foo]}\n"
            + "Axis #2:\n"
            + "{[Product].[All Products]}\n"
            + "{[Product].[Food].[Baked Goods].[Bread]}\n"
            + "{[Product].[Food].[Baked Goods].[*Subtotal - Bread]}\n"
            + "{[Product].[Food].[Baked Goods].[Bread].[Bagels]}\n"
            + "{[Product].[Food].[Baked Goods].[Bread].[Muffins]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: All Products : (All)\n"
            + "Row #1: 7,870\n"
            + "Row #1: Bread : Product Category\n"
            + "Row #2: 4,312\n"
            + "Row #2: #ERR: mondrian.olap.fun.MondrianEvaluationException: Could not find an aggregator in the current evaluation context\n"
            + "Row #3: 815\n"
            + "Row #3: Bagels : Product Subcategory\n"
            + "Row #4: 3,497\n"
            + "Row #4: Muffins : Product Subcategory\n");
    }
public void testCalculatedChild() {
// Construct calculated children with the same name for both [Drink] and
// [Non-Consumable]. Then, create a metric to select the calculated
// child based on current product member.
assertQueryReturns(
"with\n"
+ " member [Product].[All Products].[Drink].[Calculated Child] as '[Product].[All Products].[Drink].[Alcoholic Beverages]'\n"
+ " member [Product].[All Products].[Non-Consumable].[Calculated Child] as '[Product].[All Products].[Non-Consumable].[Carousel]'\n"
+ " member [Measures].[Unit Sales CC] as '([Measures].[Unit Sales],[Product].currentmember.CalculatedChild(\"Calculated Child\"))'\n"
+ " select non empty {[Measures].[Unit Sales CC]} on columns,\n"
+ " non empty {[Product].[Drink], [Product].[Non-Consumable]} on rows\n"
+ " from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales CC]}\n"
+ "Axis #2:\n"
+ "{[Product].[Drink]}\n"
+ "{[Product].[Non-Consumable]}\n"
+ "Row #0: 6,838\n" // Calculated child for [Drink]
+ "Row #1: 841\n"); // Calculated child for [Non-Consumable]
Member member = executeSingletonAxis(
"[Product].[All Products].CalculatedChild(\"foobar\")");
Assert.assertEquals(member, null);
}
public void testCalculatedChildUsingItem() {
// Construct calculated children with the same name for both [Drink] and
// [Non-Consumable]. Then, create a metric to select the first
// calculated child.
assertQueryReturns(
"with\n"
+ " member [Product].[All Products].[Drink].[Calculated Child] as '[Product].[All Products].[Drink].[Alcoholic Beverages]'\n"
+ " member [Product].[All Products].[Non-Consumable].[Calculated Child] as '[Product].[All Products].[Non-Consumable].[Carousel]'\n"
+ " member [Measures].[Unit Sales CC] as '([Measures].[Unit Sales],AddCalculatedMembers([Product].currentmember.children).Item(\"Calculated Child\"))'\n"
+ " select non empty {[Measures].[Unit Sales CC]} on columns,\n"
+ " non empty {[Product].[Drink], [Product].[Non-Consumable]} on rows\n"
+ " from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales CC]}\n"
+ "Axis #2:\n"
+ "{[Product].[Drink]}\n"
+ "{[Product].[Non-Consumable]}\n"
+ "Row #0: 6,838\n"
// Note: For [Non-Consumable], the calculated child for [Drink] was
// selected!
+ "Row #1: 6,838\n");
Member member = executeSingletonAxis(
"[Product].[All Products].CalculatedChild(\"foobar\")");
Assert.assertEquals(member, null);
}
public void testCalculatedChildOnMemberWithNoChildren() {
Member member =
executeSingletonAxis(
"[Measures].[Store Sales].CalculatedChild(\"foobar\")");
Assert.assertEquals(member, null);
}
public void testCalculatedChildOnNullMember() {
Member member =
executeSingletonAxis(
"[Measures].[Store Sales].parent.CalculatedChild(\"foobar\")");
Assert.assertEquals(member, null);
}
    /**
     * Tests the Cast operator across source/target type combinations
     * (integer, string, boolean, null) plus its error cases (unknown type,
     * non-identifier type token).
     */
    public void testCast() {
        // NOTE: Some of these tests fail with 'cannot convert ...', and they
        // probably shouldn't. Feel free to fix the conversion.
        // -- jhyde, 2006/9/3
        // From integer
        // To integer (trivial)
        assertExprReturns("0 + Cast(1 + 2 AS Integer)", "3");
        // To String
        assertExprReturns("'' || Cast(1 + 2 AS String)", "3.0");
        // To Boolean
        assertExprReturns("1=1 AND Cast(1 + 2 AS Boolean)", "true");
        assertExprReturns("1=1 AND Cast(1 - 1 AS Boolean)", "false");
        // From boolean
        // To String
        assertExprReturns("'' || Cast((1 = 1 AND 1 = 2) AS String)", "false");
        // This case demonstrates the relative precedence of 'AS' in 'CAST'
        // and 'AS' for creating inline named sets. See also bug MONDRIAN-648.
        Util.discard(Bug.BugMondrian648Fixed);
        assertExprReturns(
            "'xxx' || Cast(1 = 1 AND 1 = 2 AS String)",
            "xxxfalse");
        // To boolean (trivial)
        assertExprReturns(
            "1=1 AND Cast((1 = 1 AND 1 = 2) AS Boolean)",
            "false");
        assertExprReturns(
            "1=1 OR Cast(1 = 1 AND 1 = 2 AS Boolean)",
            "true");
        // From null : should not throw exceptions since RolapResult.executeBody
        // can receive NULL values when the cell value is not loaded yet, so
        // should return null instead.
        // To Integer : Expect to return NULL
        // Expect to return NULL
        assertExprReturns("0 * Cast(NULL AS Integer)", "");
        // To Numeric : Expect to return NULL
        // Expect to return NULL
        assertExprReturns("0 * Cast(NULL AS Numeric)", "");
        // To String : Expect to return "null"
        assertExprReturns("'' || Cast(NULL AS String)", "null");
        // To Boolean : Expect to return NULL, but since FunUtil.BooleanNull
        // does not implement three-valued boolean logic yet, this will return
        // false
        assertExprReturns("1=1 AND Cast(NULL AS Boolean)", "false");
        // Double is not allowed as a type
        assertExprThrows(
            "Cast(1 AS Double)",
            "Unknown type 'Double'; values are NUMERIC, STRING, BOOLEAN");
        // An integer constant is not allowed as a type
        assertExprThrows(
            "Cast(1 AS 5)",
            "Syntax error at line 1, column 11, token '5'");
        // Cast should accept a computed (non-literal) string for boolean
        assertExprReturns("Cast('tr' || 'ue' AS boolean)", "true");
    }
/**
* Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-524">
* MONDRIAN-524, "VB functions: expected primitive type, got
* java.lang.Object"</a>.
*/
public void testCastBug524() {
assertExprReturns(
"Cast(Int([Measures].[Store Sales] / 3600) as String)",
"157");
}
/**
* Tests {@link mondrian.olap.FunTable#getFunInfoList()}, but more
* importantly, generates an HTML table of all implemented functions into
* a file called "functions.html". You can manually include that table
* in the <a href="{@docRoot}/../mdx.html">MDX
* specification</a>.
*/
public void testDumpFunctions() throws IOException {
final List<FunInfo> funInfoList = new ArrayList<FunInfo>();
funInfoList.addAll(BuiltinFunTable.instance().getFunInfoList());
// Add some UDFs.
funInfoList.add(
new FunInfo(
new UdfResolver(
new UdfResolver.ClassUdfFactory(
CurrentDateMemberExactUdf.class,
null))));
funInfoList.add(
new FunInfo(
new UdfResolver(
new UdfResolver.ClassUdfFactory(
CurrentDateMemberUdf.class,
null))));
funInfoList.add(
new FunInfo(
new UdfResolver(
new UdfResolver.ClassUdfFactory(
CurrentDateStringUdf.class,
null))));
Collections.sort(funInfoList);
final File file = new File("functions.html");
final FileOutputStream os = new FileOutputStream(file);
final PrintWriter pw = new PrintWriter(os);
pw.println("<table border='1'>");
pw.println("<tr>");
pw.println("<td><b>Name</b></td>");
pw.println("<td><b>Description</b></td>");
pw.println("</tr>");
for (FunInfo funInfo : funInfoList) {
pw.println("<tr>");
pw.print(" <td valign=top><code>");
printHtml(pw, funInfo.getName());
pw.println("</code></td>");
pw.print(" <td>");
if (funInfo.getDescription() != null) {
printHtml(pw, funInfo.getDescription());
}
pw.println();
final String[] signatures = funInfo.getSignatures();
if (signatures != null) {
pw.println(" <h1>Syntax</h1>");
for (int j = 0; j < signatures.length; j++) {
if (j > 0) {
pw.println("<br/>");
}
String signature = signatures[j];
pw.print(" ");
printHtml(pw, signature);
}
pw.println();
}
pw.println(" </td>");
pw.println("</tr>");
}
pw.println("</table>");
pw.close();
}
public void testComplexOrExpr()
{
switch (TestContext.instance().getDialect().getDatabaseProduct()) {
case INFOBRIGHT:
// Skip this test on Infobright, because [Promotion Sales] is
// defined wrong.
return;
}
// make sure all aggregates referenced in the OR expression are
// processed in a single load request by setting the eval depth to
// a value smaller than the number of measures
int origDepth = MondrianProperties.instance().MaxEvalDepth.get();
MondrianProperties.instance().MaxEvalDepth.set(3);
assertQueryReturns(
"with set [*NATIVE_CJ_SET] as '[Store].[Store Country].members' "
+ "set [*GENERATED_MEMBERS_Measures] as "
+ " '{[Measures].[Unit Sales], [Measures].[Store Cost], "
+ " [Measures].[Sales Count], [Measures].[Customer Count], "
+ " [Measures].[Promotion Sales]}' "
+ "set [*GENERATED_MEMBERS] as "
+ " 'Generate([*NATIVE_CJ_SET], {[Store].CurrentMember})' "
+ "member [Store].[*SUBTOTAL_MEMBER_SEL~SUM] as 'Sum([*GENERATED_MEMBERS])' "
+ "select [*GENERATED_MEMBERS_Measures] ON COLUMNS, "
+ "NON EMPTY "
+ " Filter("
+ " Generate("
+ " [*NATIVE_CJ_SET], "
+ " {[Store].CurrentMember}), "
+ " (((((NOT IsEmpty([Measures].[Unit Sales])) OR "
+ " (NOT IsEmpty([Measures].[Store Cost]))) OR "
+ " (NOT IsEmpty([Measures].[Sales Count]))) OR "
+ " (NOT IsEmpty([Measures].[Customer Count]))) OR "
+ " (NOT IsEmpty([Measures].[Promotion Sales])))) "
+ "on rows "
+ "from [Sales]",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Measures].[Unit Sales]}\n"
+ "{[Measures].[Store Cost]}\n"
+ "{[Measures].[Sales Count]}\n"
+ "{[Measures].[Customer Count]}\n"
+ "{[Measures].[Promotion Sales]}\n"
+ "Axis #2:\n"
+ "{[Store].[USA]}\n"
+ "Row #0: 266,773\n"
+ "Row #0: 225,627.23\n"
+ "Row #0: 86,837\n"
+ "Row #0: 5,581\n"
+ "Row #0: 151,211.21\n");
MondrianProperties.instance().MaxEvalDepth.set(origDepth);
}
public void testLeftFunctionWithValidArguments() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 4)=\"Bell\") on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthValueZero() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 0)=\"\" And "
+ "[Store].CURRENTMEMBER.Name = \"Bellingham\") on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthValueEqualToStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 10)=\"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithLengthMoreThanStringLength() {
assertQueryReturns(
"select filter([Store].MEMBERS,"
+ "Left([Store].CURRENTMEMBER.Name, 20)=\"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
public void testLeftFunctionWithZeroLengthString() {
assertQueryReturns(
"select filter([Store].MEMBERS,Left(\"\", 20)=\"\" "
+ "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
+ "on 0 from sales",
"Axis #0:\n"
+ "{}\n"
+ "Axis #1:\n"
+ "{[Store].[USA].[WA].[Bellingham]}\n"
+ "Row #0: 2,237\n");
}
    /**
     * VBA Left() with a negative length surfaces as a
     * StringIndexOutOfBoundsException; the message text differs on IBM JVMs.
     */
    public void testLeftFunctionWithNegativeLength() {
        assertQueryThrows(
            "select filter([Store].MEMBERS,"
            + "Left([Store].CURRENTMEMBER.Name, -20)=\"Bellingham\") "
            + "on 0 from sales",
            Util.IBM_JVM
            ? "StringIndexOutOfBoundsException: null"
            : "StringIndexOutOfBoundsException: String index out of range: "
            + "-20");
    }
    /** VBA Mid(): Mid("Bellingham", 4, 6) extracts "lingha" (start index is 1-based). */
    public void testMidFunctionWithValidArguments() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 4, 6) = \"lingha\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Mid() on an empty string returns the empty string for any start/length. */
    public void testMidFunctionWithZeroLengthStringArgument() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"\", 4, 6) = \"\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Mid() with length past the end of the string returns the remainder of the string. */
    public void testMidFunctionWithLengthArgumentLargerThanStringLength() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 4, 20) = \"lingham\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Mid() with a start index beyond the end of the string returns the empty string. */
    public void testMidFunctionWithStartIndexGreaterThanStringLength() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 20, 2) = \"\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /**
     * VBA Mid() with start index 0: Mondrian follows the VBA spec and raises
     * an error; the SSAS-2005-compatible branch instead treats start&lt;=0 as 1.
     */
    public void testMidFunctionWithStartIndexZeroFails() {
        // Note: SSAS 2005 treats start<=0 as 1, therefore gives different
        // result for this query. We favor the VBA spec over SSAS 2005.
        if (Bug.Ssas2005Compatible) {
            assertQueryReturns(
                "select filter([Store].MEMBERS,"
                + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
                + "And Mid(\"Bellingham\", 0, 2) = \"Be\")"
                + "on 0 from sales",
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Store].[USA].[WA].[Bellingham]}\n"
                + "Row #0: 2,237\n");
        } else {
            assertQueryThrows(
                "select filter([Store].MEMBERS,"
                + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
                + "And Mid(\"Bellingham\", 0, 2) = \"Be\")"
                + "on 0 from sales",
                "Invalid parameter. Start parameter of Mid function must be "
                + "positive");
        }
    }
    /** VBA Mid() with start index 1 (the minimum legal value) extracts from the first character. */
    public void testMidFunctionWithStartIndexOne() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 1, 2) = \"Be\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Mid() with a negative start index is rejected with an "Invalid parameter" error. */
    public void testMidFunctionWithNegativeStartIndex() {
        assertQueryThrows(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", -20, 2) = \"\")"
            + "on 0 from sales",
            "Invalid parameter. "
            + "Start parameter of Mid function must be positive");
    }
    /** VBA Mid() with a negative length is rejected with an "Invalid parameter" error. */
    public void testMidFunctionWithNegativeLength() {
        assertQueryThrows(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 2, -2) = \"\")"
            + "on 0 from sales",
            "Invalid parameter. "
            + "Length parameter of Mid function must be non-negative");
    }
    /** VBA Mid() without a length argument returns everything from the start index to the end. */
    public void testMidFunctionWithoutLength() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,"
            + "[Store].CURRENTMEMBER.Name = \"Bellingham\""
            + "And Mid(\"Bellingham\", 2) = \"ellingham\")"
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Len() on member names: only "USA" has length 3 among store members here. */
    public void testLenFunctionWithNonEmptyString() {
        assertQueryReturns(
            "select filter([Store].MEMBERS, "
            + "Len([Store].CURRENTMEMBER.Name) = 3) on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 266,773\n");
    }
    /** VBA Len() of the empty string is 0. */
    public void testLenFunctionWithAnEmptyString() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,Len(\"\")=0 "
            + "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA Len() of NULL is 0, both via a calculated member and inline. */
    public void testLenFunctionWithNullString() {
        // SSAS2005 returns 0
        assertQueryReturns(
            "with member [Measures].[Foo] as ' NULL '\n"
            + " member [Measures].[Bar] as ' len([Measures].[Foo]) '\n"
            + "select [Measures].[Bar] on 0\n"
            + "from [Warehouse and Sales]",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Bar]}\n"
            + "Row #0: 0\n");
        // same, but inline
        assertExprReturns("len(null)", 0, 0);
    }
    /** VBA UCase() upper-cases a member name: UCase("Bellingham") = "BELLINGHAM". */
    public void testUCaseWithNonEmptyString() {
        assertQueryReturns(
            "select filter([Store].MEMBERS, "
            + " UCase([Store].CURRENTMEMBER.Name) = \"BELLINGHAM\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA UCase() of the empty string is the empty string. */
    public void testUCaseWithEmptyString() {
        assertQueryReturns(
            "select filter([Store].MEMBERS, "
            + " UCase(\"\") = \"\" "
            + "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA InStr(): "ingha" starts at 1-based position 5 within "Bellingham". */
    public void testInStrFunctionWithValidArguments() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,InStr(\"Bellingham\", \"ingha\")=5 "
            + "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /**
     * Iif() mixing a boolean "then" branch with a numeric "else" branch:
     * the condition holds, so the boolean branch decides the filter.
     *
     * <p>NOTE(review): method name has a stray "F" ("testIifF...") — looks
     * like a typo relative to the sibling testIifWith... method; confirm
     * before renaming (suites may reference tests by name).</p>
     */
    public void
        testIifFWithBooleanBooleanAndNumericParameterForReturningTruePart()
    {
        assertQueryReturns(
            "SELECT Filter(Store.allmembers, "
            + "iif(measures.profit < 400000,"
            + "[store].currentMember.NAME = \"USA\", 0)) on 0 FROM SALES",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 266,773\n");
    }
    /**
     * Iif() mixing a boolean "then" branch with a numeric "else" branch:
     * the condition fails, so the numeric 1 (truthy) keeps every child.
     */
    public void
        testIifWithBooleanBooleanAndNumericParameterForReturningFalsePart()
    {
        assertQueryReturns(
            "SELECT Filter([Store].[USA].[CA].[Beverly Hills].children, "
            + "iif(measures.profit > 400000,"
            + "[store].currentMember.NAME = \"USA\", 1)) on 0 FROM SALES",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[CA].[Beverly Hills].[Store 6]}\n"
            + "Row #0: 21,333\n");
    }
    /** Iif() returning numeric 0 (falsy) filters out every member, yielding an empty axis. */
    public void testIIFWithBooleanBooleanAndNumericParameterForReturningZero() {
        assertQueryReturns(
            "SELECT Filter(Store.allmembers, "
            + "iif(measures.profit > 400000,"
            + "[store].currentMember.NAME = \"USA\", 0)) on 0 FROM SALES",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n");
    }
    /** VBA InStr() with an empty search-in string returns 0. */
    public void testInStrFunctionWithEmptyString1() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,InStr(\"\", \"ingha\")=0 "
            + "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** VBA InStr() with an empty search-for string returns 1. */
    public void testInStrFunctionWithEmptyString2() {
        assertQueryReturns(
            "select filter([Store].MEMBERS,InStr(\"Bellingham\", \"\")=1 "
            + "And [Store].CURRENTMEMBER.Name = \"Bellingham\") "
            + "on 0 from sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA].[WA].[Bellingham]}\n"
            + "Row #0: 2,237\n");
    }
    /** &lt;Member&gt;.Caption can be compared against a string in a Filter predicate. */
    public void testGetCaptionUsingMemberDotCaption() {
        assertQueryReturns(
            "SELECT Filter(Store.allmembers, "
            + "[store].currentMember.caption = \"USA\") on 0 FROM SALES",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Store].[USA]}\n"
            + "Row #0: 266,773\n");
    }
private static void printHtml(PrintWriter pw, String s) {
final String escaped = StringEscaper.htmlEscaper.escapeString(s);
pw.print(escaped);
}
    /**
     * Cache() is transparent for each supported data type and rejects
     * multiple arguments.
     */
    public void testCache() {
        // test various data types: integer, string, member, set, tuple
        assertExprReturns("Cache(1 + 2)", "3");
        assertExprReturns("Cache('foo' || 'bar')", "foobar");
        assertAxisReturns(
            "[Gender].Children",
            "[Gender].[F]\n"
            + "[Gender].[M]");
        assertAxisReturns(
            "([Gender].[M], [Marital Status].[S].PrevMember)",
            "{[Gender].[M], [Marital Status].[M]}");
        // inside another expression
        assertAxisReturns(
            "Order(Cache([Gender].Children), Cache(([Measures].[Unit Sales], [Time].[1997].[Q1])), BDESC)",
            "[Gender].[M]\n"
            + "[Gender].[F]");
        // doesn't work with multiple args
        assertExprThrows(
            "Cache(1, 2)",
            "No function matches signature 'Cache(<Numeric Expression>, <Numeric Expression>)'");
    }
// The following methods test VBA functions. They don't test all of them,
// because the raw methods are tested in VbaTest, but they test the core
// functionalities like error handling and operator overloading.
    /** A simple one-argument VBA function (Exp), including null propagation. */
    public void testVbaBasic() {
        // Exp is a simple function: one arg.
        assertExprReturns("exp(0)", "1");
        assertExprReturns("exp(1)", Math.E, 0.00000001);
        assertExprReturns("exp(-2)", 1d / (Math.E * Math.E), 0.00000001);
        // If any arg is null, result is null.
        assertExprReturns("exp(cast(null as numeric))", "");
    }
// Test a VBA function with variable number of args.
    /** Replace() resolves correctly for each of its 3-, 4- and 5-argument overloads. */
    public void testVbaOverloading() {
        assertExprReturns("replace('xyzxyz', 'xy', 'a')", "azaz");
        assertExprReturns("replace('xyzxyz', 'xy', 'a', 2)", "xyzaz");
        assertExprReturns("replace('xyzxyz', 'xy', 'a', 1, 1)", "azxyz");
    }
// Test VBA exception handling
    /** An exception inside a VBA function (Right with a negative length) is reported to the caller. */
    public void testVbaExceptions() {
        assertExprThrows(
            "right(\"abc\", -4)",
            Util.IBM_JVM
            ? "StringIndexOutOfBoundsException: null"
            : "StringIndexOutOfBoundsException: "
            + "String index out of range: -4");
    }
    /** VBA date functions both producing a date (DateSerial) and consuming one (Year). */
    public void testVbaDateTime() {
        // function which returns date
        assertExprReturns(
            "Format(DateSerial(2006, 4, 29), \"Long Date\")",
            "Saturday, April 29, 2006");
        // function with date parameter
        assertExprReturns("Year(DateSerial(2006, 4, 29))", "2,006");
    }
    /** Excel PI(); the default numeric format rounds the displayed value to "3". */
    public void testExcelPi() {
        // The PI function is defined in the Excel class.
        assertExprReturns("Pi()", "3");
    }
    /** Excel POWER(), including a fractional power of a negative number yielding NaN. */
    public void testExcelPower() {
        assertExprReturns("Power(8, 0.333333)", 2.0, 0.01);
        assertExprReturns("Power(-2, 0.5)", Double.NaN, 0.001);
    }
// Comment from the bug: the reason for this is that in AbstractExpCompiler
// in the compileInteger method we are casting an IntegerCalc into a
// DoubleCalc and there is no check for IntegerCalc in the NumericType
// conditional path.
    /** Regression: LEFT whose length argument is itself an integer expression (LEN). */
    public void testBug1881739() {
        assertExprReturns("LEFT(\"TEST\", LEN(\"TEST\"))", "TEST");
    }
/**
* Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-296">
* MONDRIAN-296, "Cube getTimeDimension use when Cube has no Time
* dimension"</a>.
*/
    public void testCubeTimeDimensionFails() {
        // Every time-related function must report "no time dimension" against
        // the [Store] cube rather than failing in some less helpful way.
        assertQueryThrows(
            "select LastPeriods(1) on columns from [Store]",
            "'LastPeriods', no time dimension");
        assertQueryThrows(
            "select OpeningPeriod() on columns from [Store]",
            "'OpeningPeriod', no time dimension");
        assertQueryThrows(
            "select OpeningPeriod([Store Type]) on columns from [Store]",
            "'OpeningPeriod', no time dimension");
        assertQueryThrows(
            "select ClosingPeriod() on columns from [Store]",
            "'ClosingPeriod', no time dimension");
        assertQueryThrows(
            "select ClosingPeriod([Store Type]) on columns from [Store]",
            "'ClosingPeriod', no time dimension");
        assertQueryThrows(
            "select ParallelPeriod() on columns from [Store]",
            "'ParallelPeriod', no time dimension");
        assertQueryThrows(
            "select PeriodsToDate() on columns from [Store]",
            "'PeriodsToDate', no time dimension");
        assertQueryThrows(
            "select Mtd() on columns from [Store]",
            "'Mtd', no time dimension");
    }
    /** Filter() accepts an empty input set (unlike Descendants, which needs the set's type). */
    public void testFilterEmpty() {
        // Unlike "Descendants(<set>, ...)", we do not need to know the precise
        // type of the set, therefore it is OK if the set is empty.
        assertAxisReturns(
            "Filter({}, 1=0)",
            "");
        assertAxisReturns(
            "Filter({[Time].[Time].Children}, 1=0)",
            "");
    }
    /**
     * Filter() evaluated under a calculated member in the slicer (an
     * Aggregate over a date range), alone and wrapped in Order().
     */
    public void testFilterCalcSlicer() {
        assertQueryReturns(
            "with member [Time].[Time].[Date Range] as \n"
            + "'Aggregate({[Time].[1997].[Q1]:[Time].[1997].[Q3]})'\n"
            + "select\n"
            + "{[Measures].[Unit Sales],[Measures].[Store Cost],\n"
            + "[Measures].[Store Sales]} ON columns,\n"
            + "NON EMPTY Filter ([Store].[Store State].members,\n"
            + "[Measures].[Store Cost] > 75000) ON rows\n"
            + "from [Sales] where [Time].[Date Range]",
            "Axis #0:\n"
            + "{[Time].[Date Range]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Cost]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[WA]}\n"
            + "Row #0: 90,131\n"
            + "Row #0: 76,151.59\n"
            + "Row #0: 190,776.88\n");
        assertQueryReturns(
            "with member [Time].[Time].[Date Range] as \n"
            + "'Aggregate({[Time].[1997].[Q1]:[Time].[1997].[Q3]})'\n"
            + "select\n"
            + "{[Measures].[Unit Sales],[Measures].[Store Cost],\n"
            + "[Measures].[Store Sales]} ON columns,\n"
            + "NON EMPTY Order (Filter ([Store].[Store State].members,\n"
            + "[Measures].[Store Cost] > 100),[Measures].[Store Cost], DESC) ON rows\n"
            + "from [Sales] where [Time].[Date Range]",
            "Axis #0:\n"
            + "{[Time].[Date Range]}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "{[Measures].[Store Cost]}\n"
            + "{[Measures].[Store Sales]}\n"
            + "Axis #2:\n"
            + "{[Store].[USA].[WA]}\n"
            + "{[Store].[USA].[CA]}\n"
            + "{[Store].[USA].[OR]}\n"
            + "Row #0: 90,131\n"
            + "Row #0: 76,151.59\n"
            + "Row #0: 190,776.88\n"
            + "Row #1: 53,312\n"
            + "Row #1: 45,435.93\n"
            + "Row #1: 113,966.00\n"
            + "Row #2: 51,306\n"
            + "Row #2: 43,033.82\n"
            + "Row #2: 107,823.63\n");
    }
    /** Exists() against the All member keeps every member of the first set. */
    public void testExistsMembersAll() {
        assertQueryReturns(
            "select exists(\n"
            + "  {[Customers].[All Customers],\n"
            + "   [Customers].[Country].Members,\n"
            + "   [Customers].[State Province].[CA],\n"
            + "   [Customers].[Canada].[BC].[Richmond]},\n"
            + "  {[Customers].[All Customers]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[Canada]}\n"
            + "{[Customers].[Mexico]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "{[Customers].[Canada].[BC].[Richmond]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: \n"
            + "Row #0: \n"
            + "Row #0: 266,773\n"
            + "Row #0: 74,748\n"
            + "Row #0: \n");
    }
    /** Exists() against a country member keeps only that member's ancestors/descendants. */
    public void testExistsMembersLevel2() {
        assertQueryReturns(
            "select exists(\n"
            + "  {[Customers].[All Customers],\n"
            + "   [Customers].[Country].Members,\n"
            + "   [Customers].[State Province].[CA],\n"
            + "   [Customers].[Canada].[BC].[Richmond]},\n"
            + "  {[Customers].[Country].[USA]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: 266,773\n"
            + "Row #0: 74,748\n");
    }
    /** Exists() whose second set is from an unrelated dimension produces an empty result axis. */
    public void testExistsMembersDiffDim() {
        assertQueryReturns(
            "select exists(\n"
            + "  {[Customers].[All Customers],\n"
            + "   [Customers].[All Customers].Children,\n"
            + "   [Customers].[State Province].Members},\n"
            + "  {[Product].Members})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n");
    }
    /** Exists() where the second set mixes members from two levels of the same hierarchy. */
    public void testExistsMembers2Hierarchies() {
        assertQueryReturns(
            "select exists(\n"
            + "  {[Customers].[All Customers],\n"
            + "   [Customers].[All Customers].Children,\n"
            + "   [Customers].[State Province].Members,\n"
            + "   [Customers].[Country].[Canada],\n"
            + "   [Customers].[Country].[Mexico]},\n"
            + "  {[Customers].[Country].[USA],\n"
            + "   [Customers].[State Province].[Veracruz]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[All Customers]}\n"
            + "{[Customers].[Mexico]}\n"
            + "{[Customers].[USA]}\n"
            + "{[Customers].[Mexico].[Veracruz]}\n"
            + "{[Customers].[USA].[CA]}\n"
            + "{[Customers].[USA].[OR]}\n"
            + "{[Customers].[USA].[WA]}\n"
            + "{[Customers].[Mexico]}\n"
            + "Row #0: 266,773\n"
            + "Row #0: \n"
            + "Row #0: 266,773\n"
            + "Row #0: \n"
            + "Row #0: 74,748\n"
            + "Row #0: 67,659\n"
            + "Row #0: 124,366\n"
            + "Row #0: \n");
    }
    /** Exists() over a tuple set, filtered against the All member. */
    public void testExistsTuplesAll() {
        assertQueryReturns(
            "select exists(\n"
            + "  crossjoin({[Product].[All Products]},{[Customers].[All Customers]}),\n"
            + "  {[Customers].[All Customers]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[All Products], [Customers].[All Customers]}\n"
            + "Row #0: 266,773\n");
    }
    /** Exists() over tuples keeps only the tuple whose Customers member matches [USA]. */
    public void testExistsTuplesLevel2() {
        assertQueryReturns(
            "select exists(\n"
            + "  crossjoin({[Product].[All Products]},{[Customers].[All Customers].Children}),\n"
            + "  {[Customers].[All Customers].[USA]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Product].[All Products], [Customers].[USA]}\n"
            + "Row #0: 266,773\n");
    }
    /** Exists() over tuples keeps the state-level descendants of [USA]. */
    public void testExistsTuplesLevel23() {
        assertQueryReturns(
            "select exists(\n"
            + "  crossjoin({[Customers].[State Province].Members}, {[Product].[All Products]}),\n"
            + "  {[Customers].[All Customers].[USA]})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA], [Product].[All Products]}\n"
            + "{[Customers].[USA].[OR], [Product].[All Products]}\n"
            + "{[Customers].[USA].[WA], [Product].[All Products]}\n"
            + "Row #0: 74,748\n"
            + "Row #0: 67,659\n"
            + "Row #0: 124,366\n");
    }
    /** Exists() where the second argument is itself a two-dimension tuple. */
    public void testExistsTuples2Dim() {
        assertQueryReturns(
            "select exists(\n"
            + "  crossjoin({[Customers].[State Province].Members}, {[Product].[Product Family].Members}),\n"
            + "  {([Product].[Product Department].[Dairy],[Customers].[All Customers].[USA])})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Customers].[USA].[CA], [Product].[Drink]}\n"
            + "{[Customers].[USA].[OR], [Product].[Drink]}\n"
            + "{[Customers].[USA].[WA], [Product].[Drink]}\n"
            + "Row #0: 7,102\n"
            + "Row #0: 6,106\n"
            + "Row #0: 11,389\n");
    }
    /**
     * Exists() where the filter tuple involves a dimension (Promotions) not
     * present in the first set: the result axis is empty.
     */
    public void testExistsTuplesDiffDim() {
        assertQueryReturns(
            "select exists(\n"
            + "  crossjoin(\n"
            + "    crossjoin({[Customers].[State Province].Members},\n"
            + "              {[Time].[Year].[1997]}), \n"
            + "    {[Product].[Product Family].Members}),\n"
            + "  {([Product].[Product Department].[Dairy],\n"
            + "    [Promotions].[All Promotions], \n"
            + "    [Customers].[All Customers].[USA])})\n"
            + "on 0 from Sales",
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n");
    }
/**
* Executes a query that has a complex parse tree. Goal is to find
* algorithmic complexity bugs in the validator which would make the query
* run extremely slowly.
*/
    public void testComplexQuery() {
        // Both the hand-written query and the machine-generated deep/wide
        // equivalent (via generateComplex) must produce the same result in
        // reasonable time; a superlinear validator would time out here.
        final String expected =
            "Axis #0:\n"
            + "{}\n"
            + "Axis #1:\n"
            + "{[Measures].[Unit Sales]}\n"
            + "Axis #2:\n"
            + "{[Gender].[All Gender]}\n"
            + "{[Gender].[F]}\n"
            + "{[Gender].[M]}\n"
            + "Row #0: 266,773\n"
            + "Row #1: 131,558\n"
            + "Row #2: 135,215\n";
        // hand written case
        assertQueryReturns(
            "select\n"
            + " [Measures].[Unit Sales] on 0,\n"
            + " Distinct({\n"
            + " [Gender],\n"
            + " Tail(\n"
            + " Head({\n"
            + " [Gender],\n"
            + " [Gender].[F],\n"
            + " [Gender].[M]},\n"
            + " 2),\n"
            + " 1),\n"
            + " Tail(\n"
            + " Head({\n"
            + " [Gender],\n"
            + " [Gender].[F],\n"
            + " [Gender].[M]},\n"
            + " 2),\n"
            + " 1),\n"
            + " [Gender].[M]}) on 1\n"
            + "from [Sales]", expected);
        // generated equivalent
        StringBuilder buf = new StringBuilder();
        buf.append(
            "select\n"
            + " [Measures].[Unit Sales] on 0,\n");
        generateComplex(buf, " ", 0, 7, 3);
        buf.append(
            " on 1\n"
            + "from [Sales]");
        // Debug-only dump of the generated MDX; intentionally dead code.
        if (false) {
            System.out.println(buf.toString().length() + ": " + buf.toString());
        }
        assertQueryReturns(buf.toString(), expected);
    }
/**
* Recursive routine to generate a complex MDX expression.
*
* @param buf String builder
* @param indent Indent
* @param depth Current depth
* @param depthLimit Max recursion depth
* @param breadth Number of iterations at each depth
*/
private void generateComplex(
StringBuilder buf,
String indent,
int depth,
int depthLimit,
int breadth)
{
buf.append(indent + "Distinct({\n");
buf.append(indent + " [Gender],\n");
for (int i = 0; i < breadth; i++) {
if (depth < depthLimit) {
buf.append(indent + " Tail(\n");
buf.append(indent + " Head({\n");
generateComplex(
buf,
indent + " ",
depth + 1,
depthLimit,
breadth);
buf.append("},\n");
buf.append(indent + " 2),\n");
buf.append(indent + " 1),\n");
} else {
buf.append(indent + " [Gender].[F],\n");
}
}
buf.append(indent + " [Gender].[M]})");
}
/**
* Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-1050">
* MONDRIAN-1050, "MDX Order function fails when using DateTime expression
* for ordering"</a>.
*/
    public void testDateParameter() throws Exception {
        // Order() keyed on a DateTime expression (Now()) must not fail.
        executeQuery(
            "SELECT {[Measures].[Unit Sales]} ON COLUMNS, Order([Gender].Members, Now(), ASC) ON ROWS FROM [Sales]");
    }
/**
* Testcase for bug <a href="http://jira.pentaho.com/browse/MONDRIAN-1043">
* MONDRIAN-1043, "Hierarchize with Except sort set members differently than
* in Mondrian 3.2.1"</a>.
*
* <p>This test makes sure that
* Hierarchize and Except can be used within each other and that the
* sort order is maintained.</p>
*/
    public void testHierarchizeExcept() throws Exception {
        // Hierarchize(Except(...)) and Except(Hierarchize(...)) must yield the
        // identical, hierarchically-sorted result.
        final String[] mdxA =
            new String[] {
                "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS, Hierarchize(Except({[Customers].[USA].Children, [Customers].[USA].[CA].Children}, [Customers].[USA].[CA])) ON ROWS FROM [Sales]",
                "SELECT {[Measures].[Unit Sales], [Measures].[Store Sales]} ON COLUMNS, Except(Hierarchize({[Customers].[USA].Children, [Customers].[USA].[CA].Children}), [Customers].[USA].[CA]) ON ROWS FROM [Sales] "
            };
        for (String mdx : mdxA) {
            assertQueryReturns(
                mdx,
                "Axis #0:\n"
                + "{}\n"
                + "Axis #1:\n"
                + "{[Measures].[Unit Sales]}\n"
                + "{[Measures].[Store Sales]}\n"
                + "Axis #2:\n"
                + "{[Customers].[USA].[CA].[Altadena]}\n"
                + "{[Customers].[USA].[CA].[Arcadia]}\n"
                + "{[Customers].[USA].[CA].[Bellflower]}\n"
                + "{[Customers].[USA].[CA].[Berkeley]}\n"
                + "{[Customers].[USA].[CA].[Beverly Hills]}\n"
                + "{[Customers].[USA].[CA].[Burbank]}\n"
                + "{[Customers].[USA].[CA].[Burlingame]}\n"
                + "{[Customers].[USA].[CA].[Chula Vista]}\n"
                + "{[Customers].[USA].[CA].[Colma]}\n"
                + "{[Customers].[USA].[CA].[Concord]}\n"
                + "{[Customers].[USA].[CA].[Coronado]}\n"
                + "{[Customers].[USA].[CA].[Daly City]}\n"
                + "{[Customers].[USA].[CA].[Downey]}\n"
                + "{[Customers].[USA].[CA].[El Cajon]}\n"
                + "{[Customers].[USA].[CA].[Fremont]}\n"
                + "{[Customers].[USA].[CA].[Glendale]}\n"
                + "{[Customers].[USA].[CA].[Grossmont]}\n"
                + "{[Customers].[USA].[CA].[Imperial Beach]}\n"
                + "{[Customers].[USA].[CA].[La Jolla]}\n"
                + "{[Customers].[USA].[CA].[La Mesa]}\n"
                + "{[Customers].[USA].[CA].[Lakewood]}\n"
                + "{[Customers].[USA].[CA].[Lemon Grove]}\n"
                + "{[Customers].[USA].[CA].[Lincoln Acres]}\n"
                + "{[Customers].[USA].[CA].[Long Beach]}\n"
                + "{[Customers].[USA].[CA].[Los Angeles]}\n"
                + "{[Customers].[USA].[CA].[Mill Valley]}\n"
                + "{[Customers].[USA].[CA].[National City]}\n"
                + "{[Customers].[USA].[CA].[Newport Beach]}\n"
                + "{[Customers].[USA].[CA].[Novato]}\n"
                + "{[Customers].[USA].[CA].[Oakland]}\n"
                + "{[Customers].[USA].[CA].[Palo Alto]}\n"
                + "{[Customers].[USA].[CA].[Pomona]}\n"
                + "{[Customers].[USA].[CA].[Redwood City]}\n"
                + "{[Customers].[USA].[CA].[Richmond]}\n"
                + "{[Customers].[USA].[CA].[San Carlos]}\n"
                + "{[Customers].[USA].[CA].[San Diego]}\n"
                + "{[Customers].[USA].[CA].[San Francisco]}\n"
                + "{[Customers].[USA].[CA].[San Gabriel]}\n"
                + "{[Customers].[USA].[CA].[San Jose]}\n"
                + "{[Customers].[USA].[CA].[Santa Cruz]}\n"
                + "{[Customers].[USA].[CA].[Santa Monica]}\n"
                + "{[Customers].[USA].[CA].[Spring Valley]}\n"
                + "{[Customers].[USA].[CA].[Torrance]}\n"
                + "{[Customers].[USA].[CA].[West Covina]}\n"
                + "{[Customers].[USA].[CA].[Woodland Hills]}\n"
                + "{[Customers].[USA].[OR]}\n"
                + "{[Customers].[USA].[WA]}\n"
                + "Row #0: 2,574\n"
                + "Row #0: 5,585.59\n"
                + "Row #1: 2,440\n"
                + "Row #1: 5,136.59\n"
                + "Row #2: 3,106\n"
                + "Row #2: 6,633.97\n"
                + "Row #3: 136\n"
                + "Row #3: 320.17\n"
                + "Row #4: 2,907\n"
                + "Row #4: 6,194.37\n"
                + "Row #5: 3,086\n"
                + "Row #5: 6,577.33\n"
                + "Row #6: 198\n"
                + "Row #6: 407.38\n"
                + "Row #7: 2,999\n"
                + "Row #7: 6,284.30\n"
                + "Row #8: 129\n"
                + "Row #8: 287.78\n"
                + "Row #9: 105\n"
                + "Row #9: 219.77\n"
                + "Row #10: 2,391\n"
                + "Row #10: 5,051.15\n"
                + "Row #11: 129\n"
                + "Row #11: 271.60\n"
                + "Row #12: 3,440\n"
                + "Row #12: 7,367.06\n"
                + "Row #13: 2,543\n"
                + "Row #13: 5,460.42\n"
                + "Row #14: 163\n"
                + "Row #14: 350.22\n"
                + "Row #15: 3,284\n"
                + "Row #15: 7,082.91\n"
                + "Row #16: 2,131\n"
                + "Row #16: 4,458.60\n"
                + "Row #17: 1,616\n"
                + "Row #17: 3,409.34\n"
                + "Row #18: 1,938\n"
                + "Row #18: 4,081.37\n"
                + "Row #19: 1,834\n"
                + "Row #19: 3,908.26\n"
                + "Row #20: 2,487\n"
                + "Row #20: 5,174.12\n"
                + "Row #21: 2,651\n"
                + "Row #21: 5,636.82\n"
                + "Row #22: 2,176\n"
                + "Row #22: 4,691.94\n"
                + "Row #23: 2,973\n"
                + "Row #23: 6,422.37\n"
                + "Row #24: 2,009\n"
                + "Row #24: 4,312.99\n"
                + "Row #25: 58\n"
                + "Row #25: 109.36\n"
                + "Row #26: 2,031\n"
                + "Row #26: 4,237.46\n"
                + "Row #27: 3,098\n"
                + "Row #27: 6,696.06\n"
                + "Row #28: 163\n"
                + "Row #28: 335.98\n"
                + "Row #29: 70\n"
                + "Row #29: 145.90\n"
                + "Row #30: 133\n"
                + "Row #30: 272.08\n"
                + "Row #31: 2,712\n"
                + "Row #31: 5,595.62\n"
                + "Row #32: 144\n"
                + "Row #32: 312.43\n"
                + "Row #33: 110\n"
                + "Row #33: 212.45\n"
                + "Row #34: 145\n"
                + "Row #34: 289.80\n"
                + "Row #35: 1,535\n"
                + "Row #35: 3,348.69\n"
                + "Row #36: 88\n"
                + "Row #36: 195.28\n"
                + "Row #37: 2,631\n"
                + "Row #37: 5,663.60\n"
                + "Row #38: 161\n"
                + "Row #38: 343.20\n"
                + "Row #39: 185\n"
                + "Row #39: 367.78\n"
                + "Row #40: 2,660\n"
                + "Row #40: 5,739.63\n"
                + "Row #41: 1,790\n"
                + "Row #41: 3,862.79\n"
                + "Row #42: 2,570\n"
                + "Row #42: 5,405.02\n"
                + "Row #43: 2,503\n"
                + "Row #43: 5,302.08\n"
                + "Row #44: 2,516\n"
                + "Row #44: 5,406.21\n"
                + "Row #45: 67,659\n"
                + "Row #45: 142,277.07\n"
                + "Row #46: 124,366\n"
                + "Row #46: 263,793.22\n");
        }
    }
}
// End FunctionTest.java
|
Fix up Paul's code.
|
testsrc/main/mondrian/olap/fun/FunctionTest.java
|
Fix up Paul's code.
|
|
Java
|
mpl-2.0
|
496bb5266ebdaaf119964d61bb06cfcc9ec1b6aa
| 0
|
swannodette/rhino,rasmuserik/rhino,Pilarbrist/rhino,lv7777/egit_test,Pilarbrist/rhino,lv7777/egit_test,swannodette/rhino,Angelfirenze/rhino,AlexTrotsenko/rhino,tntim96/htmlunit-rhino-fork,jsdoc3/rhino,swannodette/rhino,Angelfirenze/rhino,sam/htmlunit-rhino-fork,sainaen/rhino,qhanam/rhino,lv7777/egit_test,jsdoc3/rhino,ashwinrayaprolu1984/rhino,tejassaoji/RhinoCoarseTainting,tejassaoji/RhinoCoarseTainting,tejassaoji/RhinoCoarseTainting,tuchida/rhino,InstantWebP2P/rhino-android,Angelfirenze/rhino,Pilarbrist/rhino,swannodette/rhino,qhanam/rhino,Pilarbrist/rhino,ashwinrayaprolu1984/rhino,AlexTrotsenko/rhino,swannodette/rhino,Angelfirenze/rhino,Angelfirenze/rhino,lv7777/egit_test,sam/htmlunit-rhino-fork,tuchida/rhino,tntim96/rhino-apigee,sam/htmlunit-rhino-fork,sam/htmlunit-rhino-fork,AlexTrotsenko/rhino,tuchida/rhino,tntim96/rhino-jscover,Distrotech/rhino,sainaen/rhino,tuchida/rhino,tntim96/rhino-jscover-repackaged,sainaen/rhino,tejassaoji/RhinoCoarseTainting,Pilarbrist/rhino,tntim96/rhino-apigee,tntim96/rhino-jscover,Angelfirenze/rhino,AlexTrotsenko/rhino,lv7777/egit_test,sainaen/rhino,rasmuserik/rhino,tuchida/rhino,tejassaoji/RhinoCoarseTainting,AlexTrotsenko/rhino,tejassaoji/RhinoCoarseTainting,swannodette/rhino,sam/htmlunit-rhino-fork,sam/htmlunit-rhino-fork,ashwinrayaprolu1984/rhino,ashwinrayaprolu1984/rhino,AlexTrotsenko/rhino,tejassaoji/RhinoCoarseTainting,jsdoc3/rhino,tuchida/rhino,Angelfirenze/rhino,ashwinrayaprolu1984/rhino,tuchida/rhino,ashwinrayaprolu1984/rhino,sam/htmlunit-rhino-fork,qhanam/rhino,lv7777/egit_test,ashwinrayaprolu1984/rhino,sainaen/rhino,tntim96/htmlunit-rhino-fork,tntim96/rhino-apigee,sainaen/rhino,lv7777/egit_test,Pilarbrist/rhino,swannodette/rhino,InstantWebP2P/rhino-android,qhanam/rhino,Pilarbrist/rhino,sainaen/rhino,AlexTrotsenko/rhino,Distrotech/rhino,tntim96/rhino-jscover-repackaged
|
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Norris Boyd
* Igor Bukanov
* Bob Jervis
* Mike McCabe
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package org.mozilla.javascript;
import java.util.AbstractCollection;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* This class implements the Object native object.
* See ECMA 15.2.
* @author Norris Boyd
*/
public class NativeObject extends IdScriptableObject implements Map
{
static final long serialVersionUID = -6345305608474346996L;
private static final Object OBJECT_TAG = "Object";
static void init(Scriptable scope, boolean sealed)
{
NativeObject obj = new NativeObject();
obj.exportAsJSClass(MAX_PROTOTYPE_ID, scope, sealed);
}
    /** Returns the class name, always {@code "Object"}. */
    @Override
    public String getClassName()
    {
        return "Object";
    }
    /** Delegates to the runtime's default object-to-string conversion. */
    @Override
    public String toString()
    {
        return ScriptRuntime.defaultObjectToString(this);
    }
    /**
     * Installs the static Object constructor functions (getPrototypeOf,
     * keys, defineProperty, freeze, etc.) as id-function properties of the
     * constructor, then lets the superclass add its own.
     */
    @Override
    protected void fillConstructorProperties(IdFunctionObject ctor)
    {
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getPrototypeOf,
                "getPrototypeOf", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_keys,
                "keys", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getOwnPropertyNames,
                "getOwnPropertyNames", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getOwnPropertyDescriptor,
                "getOwnPropertyDescriptor", 2);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_defineProperty,
                "defineProperty", 3);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isExtensible,
                "isExtensible", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_preventExtensions,
                "preventExtensions", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_defineProperties,
                "defineProperties", 2);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_create,
                "create", 2);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isSealed,
                "isSealed", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isFrozen,
                "isFrozen", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_seal,
                "seal", 1);
        addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_freeze,
                "freeze", 1);
        super.fillConstructorProperties(ctor);
    }
    /**
     * Maps a prototype-method id to its JavaScript name and arity and
     * registers it; throws IllegalArgumentException for an unknown id.
     */
    @Override
    protected void initPrototypeId(int id)
    {
        String s;
        int arity;
        switch (id) {
          case Id_constructor:    arity=1; s="constructor";    break;
          case Id_toString:       arity=0; s="toString";       break;
          case Id_toLocaleString: arity=0; s="toLocaleString"; break;
          case Id_valueOf:        arity=0; s="valueOf";        break;
          case Id_hasOwnProperty: arity=1; s="hasOwnProperty"; break;
          case Id_propertyIsEnumerable:
            arity=1; s="propertyIsEnumerable"; break;
          case Id_isPrototypeOf:  arity=1; s="isPrototypeOf";  break;
          case Id_toSource:       arity=0; s="toSource";       break;
          case Id___defineGetter__:
            arity=2; s="__defineGetter__"; break;
          case Id___defineSetter__:
            arity=2; s="__defineSetter__"; break;
          case Id___lookupGetter__:
            arity=1; s="__lookupGetter__"; break;
          case Id___lookupSetter__:
            arity=1; s="__lookupSetter__"; break;
          default: throw new IllegalArgumentException(String.valueOf(id));
        }
        initPrototypeMethod(OBJECT_TAG, id, s, arity);
    }
@Override
public Object execIdCall(IdFunctionObject f, Context cx, Scriptable scope,
Scriptable thisObj, Object[] args)
{
if (!f.hasTag(OBJECT_TAG)) {
return super.execIdCall(f, cx, scope, thisObj, args);
}
int id = f.methodId();
switch (id) {
case Id_constructor: {
if (thisObj != null) {
// BaseFunction.construct will set up parent, proto
return f.construct(cx, scope, args);
}
if (args.length == 0 || args[0] == null
|| args[0] == Undefined.instance)
{
return new NativeObject();
}
return ScriptRuntime.toObject(cx, scope, args[0]);
}
case Id_toLocaleString: // For now just alias toString
case Id_toString: {
if (cx.hasFeature(Context.FEATURE_TO_STRING_AS_SOURCE)) {
String s = ScriptRuntime.defaultObjectToSource(cx, scope,
thisObj, args);
int L = s.length();
if (L != 0 && s.charAt(0) == '(' && s.charAt(L - 1) == ')') {
// Strip () that surrounds toSource
s = s.substring(1, L - 1);
}
return s;
}
return ScriptRuntime.defaultObjectToString(thisObj);
}
case Id_valueOf:
return thisObj;
case Id_hasOwnProperty: {
boolean result;
if (args.length == 0) {
result = false;
} else {
String s = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
if (s == null) {
int index = ScriptRuntime.lastIndexResult(cx);
result = thisObj.has(index, thisObj);
} else {
result = thisObj.has(s, thisObj);
}
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_propertyIsEnumerable: {
boolean result;
if (args.length == 0) {
result = false;
} else {
String s = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
if (s == null) {
int index = ScriptRuntime.lastIndexResult(cx);
result = thisObj.has(index, thisObj);
if (result && thisObj instanceof ScriptableObject) {
ScriptableObject so = (ScriptableObject)thisObj;
int attrs = so.getAttributes(index);
result = ((attrs & ScriptableObject.DONTENUM) == 0);
}
} else {
result = thisObj.has(s, thisObj);
if (result && thisObj instanceof ScriptableObject) {
ScriptableObject so = (ScriptableObject)thisObj;
int attrs = so.getAttributes(s);
result = ((attrs & ScriptableObject.DONTENUM) == 0);
}
}
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_isPrototypeOf: {
boolean result = false;
if (args.length != 0 && args[0] instanceof Scriptable) {
Scriptable v = (Scriptable) args[0];
do {
v = v.getPrototype();
if (v == thisObj) {
result = true;
break;
}
} while (v != null);
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_toSource:
return ScriptRuntime.defaultObjectToSource(cx, scope, thisObj,
args);
case Id___defineGetter__:
case Id___defineSetter__:
{
if (args.length < 2 || !(args[1] instanceof Callable)) {
Object badArg = (args.length >= 2 ? args[1]
: Undefined.instance);
throw ScriptRuntime.notFunctionError(badArg);
}
if (!(thisObj instanceof ScriptableObject)) {
throw Context.reportRuntimeError2(
"msg.extend.scriptable",
thisObj.getClass().getName(),
String.valueOf(args[0]));
}
ScriptableObject so = (ScriptableObject)thisObj;
String name = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
int index = (name != null ? 0
: ScriptRuntime.lastIndexResult(cx));
Callable getterOrSetter = (Callable)args[1];
boolean isSetter = (id == Id___defineSetter__);
so.setGetterOrSetter(name, index, getterOrSetter, isSetter);
if (so instanceof NativeArray)
((NativeArray)so).setDenseOnly(false);
}
return Undefined.instance;
case Id___lookupGetter__:
case Id___lookupSetter__:
{
if (args.length < 1 ||
!(thisObj instanceof ScriptableObject))
return Undefined.instance;
ScriptableObject so = (ScriptableObject)thisObj;
String name = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
int index = (name != null ? 0
: ScriptRuntime.lastIndexResult(cx));
boolean isSetter = (id == Id___lookupSetter__);
Object gs;
for (;;) {
gs = so.getGetterOrSetter(name, index, isSetter);
if (gs != null)
break;
// If there is no getter or setter for the object itself,
// how about the prototype?
Scriptable v = so.getPrototype();
if (v == null)
break;
if (v instanceof ScriptableObject)
so = (ScriptableObject)v;
else
break;
}
if (gs != null)
return gs;
}
return Undefined.instance;
case ConstructorId_getPrototypeOf:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = ensureScriptable(arg);
return obj.getPrototype();
}
case ConstructorId_keys:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = ensureScriptable(arg);
Object[] ids = obj.getIds();
for (int i = 0; i < ids.length; i++) {
ids[i] = ScriptRuntime.toString(ids[i]);
}
return cx.newArray(scope, ids);
}
case ConstructorId_getOwnPropertyNames:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object[] ids = obj.getAllIds();
for (int i = 0; i < ids.length; i++) {
ids[i] = ScriptRuntime.toString(ids[i]);
}
return cx.newArray(scope, ids);
}
case ConstructorId_getOwnPropertyDescriptor:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
// TODO(norris): There's a deeper issue here if
// arg instanceof Scriptable. Should we create a new
// interface to admit the new ECMAScript 5 operations?
ScriptableObject obj = ensureScriptableObject(arg);
Object nameArg = args.length < 2 ? Undefined.instance : args[1];
String name = ScriptRuntime.toString(nameArg);
Scriptable desc = obj.getOwnPropertyDescriptor(cx, name);
return desc == null ? Undefined.instance : desc;
}
case ConstructorId_defineProperty:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object name = args.length < 2 ? Undefined.instance : args[1];
Object descArg = args.length < 3 ? Undefined.instance : args[2];
ScriptableObject desc = ensureScriptableObject(descArg);
obj.defineOwnProperty(cx, name, desc);
return obj;
}
case ConstructorId_isExtensible:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
return obj.isExtensible();
}
case ConstructorId_preventExtensions:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
obj.preventExtensions();
return obj;
}
case ConstructorId_defineProperties:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object propsObj = args.length < 2 ? Undefined.instance : args[1];
Scriptable props = Context.toObject(propsObj, getParentScope());
obj.defineOwnProperties(cx, ensureScriptableObject(props));
return obj;
}
case ConstructorId_create:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = (arg == null) ? null : ensureScriptable(arg);
ScriptableObject newObject = new NativeObject();
newObject.setParentScope(this.getParentScope());
newObject.setPrototype(obj);
if (args.length > 1 && args[1] != Undefined.instance) {
Scriptable props = Context.toObject(args[1], getParentScope());
newObject.defineOwnProperties(cx, ensureScriptableObject(props));
}
return newObject;
}
case ConstructorId_isSealed:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
if (obj.isExtensible()) return false;
for (Object name: obj.getAllIds()) {
Object configurable = obj.getOwnPropertyDescriptor(cx, name).get("configurable");
if (Boolean.TRUE.equals(configurable))
return false;
}
return true;
}
case ConstructorId_isFrozen:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
if (obj.isExtensible()) return false;
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (Boolean.TRUE.equals(desc.get("configurable")))
return false;
if (isDataDescriptor(desc) && Boolean.TRUE.equals(desc.get("writable")))
return false;
}
return true;
}
case ConstructorId_seal:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (Boolean.TRUE.equals(desc.get("configurable"))) {
desc.put("configurable", desc, false);
obj.defineOwnProperty(cx, name, desc);
}
}
obj.preventExtensions();
return obj;
}
case ConstructorId_freeze:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (isDataDescriptor(desc) && Boolean.TRUE.equals(desc.get("writable")))
desc.put("writable", desc, false);
if (Boolean.TRUE.equals(desc.get("configurable")))
desc.put("configurable", desc, false);
obj.defineOwnProperty(cx, name, desc);
}
obj.preventExtensions();
return obj;
}
default:
throw new IllegalArgumentException(String.valueOf(id));
}
}
// methods implementing java.util.Map
public boolean containsKey(Object key) {
if (key instanceof String) {
return has((String) key, this);
} else if (key instanceof Number) {
return has(((Number) key).intValue(), this);
}
return false;
}
public boolean containsValue(Object value) {
for (Object obj : values()) {
if (value == obj ||
value != null && value.equals(obj)) {
return true;
}
}
return false;
}
public Object remove(Object key) {
Object value = get(key);
if (key instanceof String) {
delete((String) key);
} else if (key instanceof Number) {
delete(((Number) key).intValue());
}
return value;
}
public Set<Object> keySet() {
return new KeySet();
}
public Collection<Object> values() {
return new ValueCollection();
}
public Set<Map.Entry<Object, Object>> entrySet() {
return new EntrySet();
}
public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
public void putAll(Map m) {
throw new UnsupportedOperationException();
}
public void clear() {
throw new UnsupportedOperationException();
}
class EntrySet extends AbstractSet<Entry<Object, Object>> {
@Override
public Iterator<Entry<Object, Object>> iterator() {
return new Iterator<Map.Entry<Object, Object>>() {
Object[] ids = getIds();
Object key = null;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Map.Entry<Object, Object> next() {
final Object ekey = key = ids[index++];
final Object value = get(key);
return new Map.Entry<Object, Object>() {
public Object getKey() {
return ekey;
}
public Object getValue() {
return value;
}
public Object setValue(Object value) {
throw new UnsupportedOperationException();
}
public boolean equals(Object other) {
if (!(other instanceof Map.Entry)) {
return false;
}
Map.Entry e = (Map.Entry) other;
return (ekey == null ? e.getKey() == null : ekey.equals(e.getKey()))
&& (value == null ? e.getValue() == null : value.equals(e.getValue()));
}
public int hashCode() {
return (ekey == null ? 0 : ekey.hashCode()) ^
(value == null ? 0 : value.hashCode());
}
public String toString() {
return ekey + "=" + value;
}
};
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
class KeySet extends AbstractSet<Object> {
@Override
public boolean contains(Object key) {
return containsKey(key);
}
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
Object[] ids = getIds();
Object key;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Object next() {
try {
return (key = ids[index++]);
} catch(ArrayIndexOutOfBoundsException e) {
key = null;
throw new NoSuchElementException();
}
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
class ValueCollection extends AbstractCollection<Object> {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
Object[] ids = getIds();
Object key;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Object next() {
return get((key = ids[index++]));
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
// #string_id_map#
@Override
protected int findPrototypeId(String s)
{
int id;
// #generated# Last update: 2007-05-09 08:15:55 EDT
L0: { id = 0; String X = null; int c;
L: switch (s.length()) {
case 7: X="valueOf";id=Id_valueOf; break L;
case 8: c=s.charAt(3);
if (c=='o') { X="toSource";id=Id_toSource; }
else if (c=='t') { X="toString";id=Id_toString; }
break L;
case 11: X="constructor";id=Id_constructor; break L;
case 13: X="isPrototypeOf";id=Id_isPrototypeOf; break L;
case 14: c=s.charAt(0);
if (c=='h') { X="hasOwnProperty";id=Id_hasOwnProperty; }
else if (c=='t') { X="toLocaleString";id=Id_toLocaleString; }
break L;
case 16: c=s.charAt(2);
if (c=='d') {
c=s.charAt(8);
if (c=='G') { X="__defineGetter__";id=Id___defineGetter__; }
else if (c=='S') { X="__defineSetter__";id=Id___defineSetter__; }
}
else if (c=='l') {
c=s.charAt(8);
if (c=='G') { X="__lookupGetter__";id=Id___lookupGetter__; }
else if (c=='S') { X="__lookupSetter__";id=Id___lookupSetter__; }
}
break L;
case 20: X="propertyIsEnumerable";id=Id_propertyIsEnumerable; break L;
}
if (X!=null && X!=s && !X.equals(s)) id = 0;
break L0;
}
// #/generated#
return id;
}
private static final int
ConstructorId_getPrototypeOf = -1,
ConstructorId_keys = -2,
ConstructorId_getOwnPropertyNames = -3,
ConstructorId_getOwnPropertyDescriptor = -4,
ConstructorId_defineProperty = -5,
ConstructorId_isExtensible = -6,
ConstructorId_preventExtensions = -7,
ConstructorId_defineProperties= -8,
ConstructorId_create = -9,
ConstructorId_isSealed = -10,
ConstructorId_isFrozen = -11,
ConstructorId_seal = -12,
ConstructorId_freeze = -13,
Id_constructor = 1,
Id_toString = 2,
Id_toLocaleString = 3,
Id_valueOf = 4,
Id_hasOwnProperty = 5,
Id_propertyIsEnumerable = 6,
Id_isPrototypeOf = 7,
Id_toSource = 8,
Id___defineGetter__ = 9,
Id___defineSetter__ = 10,
Id___lookupGetter__ = 11,
Id___lookupSetter__ = 12,
MAX_PROTOTYPE_ID = 12;
// #/string_id_map#
}
|
src/org/mozilla/javascript/NativeObject.java
|
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Rhino code, released
* May 6, 1999.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1997-1999
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Norris Boyd
* Igor Bukanov
* Bob Jervis
* Mike McCabe
*
* Alternatively, the contents of this file may be used under the terms of
* the GNU General Public License Version 2 or later (the "GPL"), in which
* case the provisions of the GPL are applicable instead of those above. If
* you wish to allow use of your version of this file only under the terms of
* the GPL and not to allow others to use your version of this file under the
* MPL, indicate your decision by deleting the provisions above and replacing
* them with the notice and other provisions required by the GPL. If you do
* not delete the provisions above, a recipient may use your version of this
* file under either the MPL or the GPL.
*
* ***** END LICENSE BLOCK ***** */
package org.mozilla.javascript;
import java.util.AbstractCollection;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* This class implements the Object native object.
* See ECMA 15.2.
* @author Norris Boyd
*/
public class NativeObject extends IdScriptableObject implements Map
{
static final long serialVersionUID = -6345305608474346996L;
private static final Object OBJECT_TAG = "Object";
static void init(Scriptable scope, boolean sealed)
{
NativeObject obj = new NativeObject();
obj.exportAsJSClass(MAX_PROTOTYPE_ID, scope, sealed);
}
@Override
public String getClassName()
{
return "Object";
}
@Override
public String toString()
{
return ScriptRuntime.defaultObjectToString(this);
}
@Override
protected void fillConstructorProperties(IdFunctionObject ctor)
{
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getPrototypeOf,
"getPrototypeOf", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_keys,
"keys", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getOwnPropertyNames,
"getOwnPropertyNames", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_getOwnPropertyDescriptor,
"getOwnPropertyDescriptor", 2);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_defineProperty,
"defineProperty", 3);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isExtensible,
"isExtensible", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_preventExtensions,
"preventExtensions", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_defineProperties,
"defineProperties", 2);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_create,
"create", 2);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isSealed,
"isSealed", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_isFrozen,
"isFrozen", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_seal,
"seal", 1);
addIdFunctionProperty(ctor, OBJECT_TAG, ConstructorId_freeze,
"freeze", 1);
super.fillConstructorProperties(ctor);
}
@Override
protected void initPrototypeId(int id)
{
String s;
int arity;
switch (id) {
case Id_constructor: arity=1; s="constructor"; break;
case Id_toString: arity=0; s="toString"; break;
case Id_toLocaleString: arity=0; s="toLocaleString"; break;
case Id_valueOf: arity=0; s="valueOf"; break;
case Id_hasOwnProperty: arity=1; s="hasOwnProperty"; break;
case Id_propertyIsEnumerable:
arity=1; s="propertyIsEnumerable"; break;
case Id_isPrototypeOf: arity=1; s="isPrototypeOf"; break;
case Id_toSource: arity=0; s="toSource"; break;
case Id___defineGetter__:
arity=2; s="__defineGetter__"; break;
case Id___defineSetter__:
arity=2; s="__defineSetter__"; break;
case Id___lookupGetter__:
arity=1; s="__lookupGetter__"; break;
case Id___lookupSetter__:
arity=1; s="__lookupSetter__"; break;
default: throw new IllegalArgumentException(String.valueOf(id));
}
initPrototypeMethod(OBJECT_TAG, id, s, arity);
}
@Override
public Object execIdCall(IdFunctionObject f, Context cx, Scriptable scope,
Scriptable thisObj, Object[] args)
{
if (!f.hasTag(OBJECT_TAG)) {
return super.execIdCall(f, cx, scope, thisObj, args);
}
int id = f.methodId();
switch (id) {
case Id_constructor: {
if (thisObj != null) {
// BaseFunction.construct will set up parent, proto
return f.construct(cx, scope, args);
}
if (args.length == 0 || args[0] == null
|| args[0] == Undefined.instance)
{
return new NativeObject();
}
return ScriptRuntime.toObject(cx, scope, args[0]);
}
case Id_toLocaleString: // For now just alias toString
case Id_toString: {
if (cx.hasFeature(Context.FEATURE_TO_STRING_AS_SOURCE)) {
String s = ScriptRuntime.defaultObjectToSource(cx, scope,
thisObj, args);
int L = s.length();
if (L != 0 && s.charAt(0) == '(' && s.charAt(L - 1) == ')') {
// Strip () that surrounds toSource
s = s.substring(1, L - 1);
}
return s;
}
return ScriptRuntime.defaultObjectToString(thisObj);
}
case Id_valueOf:
return thisObj;
case Id_hasOwnProperty: {
boolean result;
if (args.length == 0) {
result = false;
} else {
String s = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
if (s == null) {
int index = ScriptRuntime.lastIndexResult(cx);
result = thisObj.has(index, thisObj);
} else {
result = thisObj.has(s, thisObj);
}
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_propertyIsEnumerable: {
boolean result;
if (args.length == 0) {
result = false;
} else {
String s = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
if (s == null) {
int index = ScriptRuntime.lastIndexResult(cx);
result = thisObj.has(index, thisObj);
if (result && thisObj instanceof ScriptableObject) {
ScriptableObject so = (ScriptableObject)thisObj;
int attrs = so.getAttributes(index);
result = ((attrs & ScriptableObject.DONTENUM) == 0);
}
} else {
result = thisObj.has(s, thisObj);
if (result && thisObj instanceof ScriptableObject) {
ScriptableObject so = (ScriptableObject)thisObj;
int attrs = so.getAttributes(s);
result = ((attrs & ScriptableObject.DONTENUM) == 0);
}
}
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_isPrototypeOf: {
boolean result = false;
if (args.length != 0 && args[0] instanceof Scriptable) {
Scriptable v = (Scriptable) args[0];
do {
v = v.getPrototype();
if (v == thisObj) {
result = true;
break;
}
} while (v != null);
}
return ScriptRuntime.wrapBoolean(result);
}
case Id_toSource:
return ScriptRuntime.defaultObjectToSource(cx, scope, thisObj,
args);
case Id___defineGetter__:
case Id___defineSetter__:
{
if (args.length < 2 || !(args[1] instanceof Callable)) {
Object badArg = (args.length >= 2 ? args[1]
: Undefined.instance);
throw ScriptRuntime.notFunctionError(badArg);
}
if (!(thisObj instanceof ScriptableObject)) {
throw Context.reportRuntimeError2(
"msg.extend.scriptable",
thisObj.getClass().getName(),
String.valueOf(args[0]));
}
ScriptableObject so = (ScriptableObject)thisObj;
String name = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
int index = (name != null ? 0
: ScriptRuntime.lastIndexResult(cx));
Callable getterOrSetter = (Callable)args[1];
boolean isSetter = (id == Id___defineSetter__);
so.setGetterOrSetter(name, index, getterOrSetter, isSetter);
if (so instanceof NativeArray)
((NativeArray)so).setDenseOnly(false);
}
return Undefined.instance;
case Id___lookupGetter__:
case Id___lookupSetter__:
{
if (args.length < 1 ||
!(thisObj instanceof ScriptableObject))
return Undefined.instance;
ScriptableObject so = (ScriptableObject)thisObj;
String name = ScriptRuntime.toStringIdOrIndex(cx, args[0]);
int index = (name != null ? 0
: ScriptRuntime.lastIndexResult(cx));
boolean isSetter = (id == Id___lookupSetter__);
Object gs;
for (;;) {
gs = so.getGetterOrSetter(name, index, isSetter);
if (gs != null)
break;
// If there is no getter or setter for the object itself,
// how about the prototype?
Scriptable v = so.getPrototype();
if (v == null)
break;
if (v instanceof ScriptableObject)
so = (ScriptableObject)v;
else
break;
}
if (gs != null)
return gs;
}
return Undefined.instance;
case ConstructorId_getPrototypeOf:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = ensureScriptable(arg);
return obj.getPrototype();
}
case ConstructorId_keys:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = ensureScriptable(arg);
Object[] ids = obj.getIds();
for (int i = 0; i < ids.length; i++) {
ids[i] = ScriptRuntime.toString(ids[i]);
}
return cx.newArray(scope, ids);
}
case ConstructorId_getOwnPropertyNames:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object[] ids = obj.getAllIds();
for (int i = 0; i < ids.length; i++) {
ids[i] = ScriptRuntime.toString(ids[i]);
}
return cx.newArray(scope, ids);
}
case ConstructorId_getOwnPropertyDescriptor:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
// TODO(norris): There's a deeper issue here if
// arg instanceof Scriptable. Should we create a new
// interface to admit the new ECMAScript 5 operations?
ScriptableObject obj = ensureScriptableObject(arg);
Object nameArg = args.length < 2 ? Undefined.instance : args[1];
String name = ScriptRuntime.toString(nameArg);
Scriptable desc = obj.getOwnPropertyDescriptor(cx, name);
return desc == null ? Undefined.instance : desc;
}
case ConstructorId_defineProperty:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object name = args.length < 2 ? Undefined.instance : args[1];
Object descArg = args.length < 3 ? Undefined.instance : args[2];
ScriptableObject desc = ensureScriptableObject(descArg);
obj.defineOwnProperty(cx, name, desc);
return obj;
}
case ConstructorId_isExtensible:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
return obj.isExtensible();
}
case ConstructorId_preventExtensions:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
obj.preventExtensions();
return obj;
}
case ConstructorId_defineProperties:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
Object propsObj = args.length < 2 ? Undefined.instance : args[1];
Scriptable props = Context.toObject(propsObj, getParentScope());
obj.defineOwnProperties(cx, ensureScriptableObject(props));
return obj;
}
case ConstructorId_create:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
Scriptable obj = (arg == null) ? null : ensureScriptable(arg);
ScriptableObject newObject = new NativeObject();
newObject.setParentScope(this.getParentScope());
newObject.setPrototype(obj);
if (args.length > 1 && args[1] != Undefined.instance) {
Scriptable props = Context.toObject(args[1], getParentScope());
newObject.defineOwnProperties(cx, ensureScriptableObject(props));
}
return newObject;
}
case ConstructorId_isSealed:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
if (obj.isExtensible()) return false;
for (Object name: obj.getAllIds()) {
Object configurable = obj.getOwnPropertyDescriptor(cx, name).get("configurable");
if (Boolean.TRUE.equals(configurable))
return false;
}
return true;
}
case ConstructorId_isFrozen:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
if (obj.isExtensible()) return false;
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (Boolean.TRUE.equals(desc.get("configurable")))
return false;
if (isDataDescriptor(desc) && Boolean.TRUE.equals(desc.get("writable")))
return false;
}
return true;
}
case ConstructorId_seal:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (Boolean.TRUE.equals(desc.get("configurable"))) {
desc.put("configurable", desc, false);
obj.defineOwnProperty(cx, name, desc);
}
}
obj.preventExtensions();
return obj;
}
case ConstructorId_freeze:
{
Object arg = args.length < 1 ? Undefined.instance : args[0];
ScriptableObject obj = ensureScriptableObject(arg);
for (Object name: obj.getAllIds()) {
ScriptableObject desc = obj.getOwnPropertyDescriptor(cx, name);
if (isDataDescriptor(desc) && Boolean.TRUE.equals(desc.get("writable")))
desc.put("writable", desc, false);
if (Boolean.TRUE.equals(desc.get("configurable")))
desc.put("configurable", desc, false);
obj.defineOwnProperty(cx, name, desc);
}
obj.preventExtensions();
return obj;
}
default:
throw new IllegalArgumentException(String.valueOf(id));
}
}
// methods implementing java.util.Map
public boolean containsKey(Object key) {
if (key instanceof String) {
return has((String) key, this);
} else if (key instanceof Number) {
return has(((Number) key).intValue(), this);
}
return false;
}
public boolean containsValue(Object value) {
for (Object obj : values()) {
if (value == obj ||
value != null && value.equals(obj)) {
return true;
}
}
return false;
}
public Object remove(Object key) {
Object value = get(key);
if (key instanceof String) {
delete((String) key);
} else if (key instanceof Number) {
delete(((Number) key).intValue());
}
return value;
}
public Set<Object> keySet() {
return new KeySet();
}
public Collection<Object> values() {
return new ValueCollection();
}
public Set<Map.Entry<Object, Object>> entrySet() {
return new EntrySet();
}
public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
public void putAll(Map m) {
throw new UnsupportedOperationException();
}
public void clear() {
throw new UnsupportedOperationException();
}
class EntrySet extends AbstractSet<Entry<Object, Object>> {
@Override
public Iterator<Entry<Object, Object>> iterator() {
return new Iterator<Map.Entry<Object, Object>>() {
Object[] ids = getIds();
Object key = null;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Map.Entry<Object, Object> next() {
final Object ekey = key = ids[index++];
final Object value = get(key);
return new Map.Entry<Object, Object>() {
public Object getKey() {
return ekey;
}
public Object getValue() {
return value;
}
public Object setValue(Object value) {
throw new UnsupportedOperationException();
}
public boolean equals(Object other) {
if (!(other instanceof Map.Entry)) {
return false;
}
Map.Entry e = (Map.Entry) other;
return (ekey == null ? e.getKey() == null : ekey.equals(e.getKey()))
&& (value == null ? e.getValue() == null : value.equals(e.getValue()));
}
public int hashCode() {
return (ekey == null ? 0 : ekey.hashCode()) ^
(value == null ? 0 : value.hashCode());
}
public String toString() {
return ekey + "=" + value;
}
};
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
class KeySet extends AbstractSet<Object> {
@Override
public boolean contains(Object key) {
return containsKey(key);
}
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
Object[] ids = getIds();
Object key;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Object next() {
return (key = ids[index++]);
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
class ValueCollection extends AbstractCollection<Object> {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
Object[] ids = getIds();
Object key;
int index = 0;
public boolean hasNext() {
return index < ids.length;
}
public Object next() {
return get((key = ids[index++]));
}
public void remove() {
if (key == null) {
throw new IllegalStateException();
}
NativeObject.this.remove(key);
key = null;
}
};
}
@Override
public int size() {
return NativeObject.this.size();
}
}
// #string_id_map#
@Override
protected int findPrototypeId(String s)
{
int id;
// #generated# Last update: 2007-05-09 08:15:55 EDT
L0: { id = 0; String X = null; int c;
L: switch (s.length()) {
case 7: X="valueOf";id=Id_valueOf; break L;
case 8: c=s.charAt(3);
if (c=='o') { X="toSource";id=Id_toSource; }
else if (c=='t') { X="toString";id=Id_toString; }
break L;
case 11: X="constructor";id=Id_constructor; break L;
case 13: X="isPrototypeOf";id=Id_isPrototypeOf; break L;
case 14: c=s.charAt(0);
if (c=='h') { X="hasOwnProperty";id=Id_hasOwnProperty; }
else if (c=='t') { X="toLocaleString";id=Id_toLocaleString; }
break L;
case 16: c=s.charAt(2);
if (c=='d') {
c=s.charAt(8);
if (c=='G') { X="__defineGetter__";id=Id___defineGetter__; }
else if (c=='S') { X="__defineSetter__";id=Id___defineSetter__; }
}
else if (c=='l') {
c=s.charAt(8);
if (c=='G') { X="__lookupGetter__";id=Id___lookupGetter__; }
else if (c=='S') { X="__lookupSetter__";id=Id___lookupSetter__; }
}
break L;
case 20: X="propertyIsEnumerable";id=Id_propertyIsEnumerable; break L;
}
if (X!=null && X!=s && !X.equals(s)) id = 0;
break L0;
}
// #/generated#
return id;
}
private static final int
ConstructorId_getPrototypeOf = -1,
ConstructorId_keys = -2,
ConstructorId_getOwnPropertyNames = -3,
ConstructorId_getOwnPropertyDescriptor = -4,
ConstructorId_defineProperty = -5,
ConstructorId_isExtensible = -6,
ConstructorId_preventExtensions = -7,
ConstructorId_defineProperties= -8,
ConstructorId_create = -9,
ConstructorId_isSealed = -10,
ConstructorId_isFrozen = -11,
ConstructorId_seal = -12,
ConstructorId_freeze = -13,
Id_constructor = 1,
Id_toString = 2,
Id_toLocaleString = 3,
Id_valueOf = 4,
Id_hasOwnProperty = 5,
Id_propertyIsEnumerable = 6,
Id_isPrototypeOf = 7,
Id_toSource = 8,
Id___defineGetter__ = 9,
Id___defineSetter__ = 10,
Id___lookupGetter__ = 11,
Id___lookupSetter__ = 12,
MAX_PROTOTYPE_ID = 12;
// #/string_id_map#
}
|
next() should throw NoSuchElementException
|
src/org/mozilla/javascript/NativeObject.java
|
next() should throw NoSuchElementException
|
|
Java
|
mpl-2.0
|
error: pathspec 'qadevOOo/tests/java/mod/_fwk/UIConfigurationManager.java' did not match any file(s) known to git
|
064ec7ea3fd8a82f38523a284528757ddb018e05
| 1
|
JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
|
/*************************************************************************
*
* $RCSfile: UIConfigurationManager.java,v $
*
* $Revision: 1.2 $
*
* last change: $Date: 2004-02-25 18:15:20 $
*
* The Contents of this file are made available subject to the terms of
* either of the following licenses
*
* - GNU Lesser General Public License Version 2.1
* - Sun Industry Standards Source License Version 1.1
*
* Sun Microsystems Inc., October, 2000
*
* GNU Lesser General Public License Version 2.1
* =============================================
* Copyright 2000 by Sun Microsystems, Inc.
* 901 San Antonio Road, Palo Alto, CA 94303, USA
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License version 2.1, as published by the Free Software Foundation.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*
*
* Sun Industry Standards Source License Version 1.1
* =================================================
* The contents of this file are subject to the Sun Industry Standards
* Source License Version 1.1 (the "License"); You may not use this file
* except in compliance with the License. You may obtain a copy of the
* License at http://www.openoffice.org/license.html.
*
* Software provided under this License is provided on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING,
* WITHOUT LIMITATION, WARRANTIES THAT THE SOFTWARE IS FREE OF DEFECTS,
* MERCHANTABLE, FIT FOR A PARTICULAR PURPOSE, OR NON-INFRINGING.
* See the License for the specific provisions governing your rights and
* obligations concerning the Software.
*
* The Initial Developer of the Original Code is: Sun Microsystems, Inc.
*
* Copyright: 2000 by Sun Microsystems, Inc.
*
* All Rights Reserved.
*
* Contributor(s): _______________________________________
*
*
************************************************************************/
package mod._fwk;
import com.sun.star.beans.PropertyValue;
import com.sun.star.container.XIndexAccess;
import com.sun.star.container.XIndexContainer;
import com.sun.star.embed.ElementModes;
import com.sun.star.embed.XStorage;
import com.sun.star.embed.XTransactedObject;
import com.sun.star.frame.XController;
import com.sun.star.frame.XModel;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.lang.XSingleServiceFactory;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.uno.XInterface;
import drafts.com.sun.star.ui.XModuleUIConfigurationManagerSupplier;
import ifc.ui._XUIConfiguration;
import java.io.PrintWriter;
import com.sun.star.lang.EventObject;
import com.sun.star.text.XText;
import com.sun.star.text.XTextCursor;
import com.sun.star.text.XTextDocument;
import com.sun.star.util.XCloseable;
import drafts.com.sun.star.ui.ConfigurationEvent;
import drafts.com.sun.star.ui.UIElementType;
import drafts.com.sun.star.ui.XUIConfigurationManager;
import drafts.com.sun.star.ui.XUIConfigurationManagerSupplier;
import drafts.com.sun.star.ui.XUIConfigurationStorage;
import drafts.com.sun.star.ui.XUIElement;
import drafts.com.sun.star.ui.XUIElementFactory;
import ifc.ui._XUIConfigurationManager;
import lib.StatusException;
import lib.TestCase;
import lib.TestEnvironment;
import lib.TestParameters;
import util.WriterTools;
import util.utils;
/**
*/
public class UIConfigurationManager extends TestCase {
XUIConfigurationManager xManager = null;
/**
* Create test environment:
* <ul>
* <li>create a text doc</li>
* <li>get the model from the text doc</li>
* <li>query model for XUIConfigurationManagerSupplier interface</li>
* <li>get the manager from the supplier</li>
* </ul>
* @param tParam The test parameters.
* @param The log writer.
* @return The test environment.
*/
protected TestEnvironment createTestEnvironment(TestParameters tParam, PrintWriter log) {
TestEnvironment tEnv = null;
XMultiServiceFactory xMSF = (XMultiServiceFactory)tParam.getMSF();
log.println("Creating instance...");
try {
xManager = (XUIConfigurationManager)UnoRuntime.queryInterface(
XUIConfigurationManager.class, xMSF.createInstance(
"com.sun.star.comp.framework.UIConfigurationManager"));
}
catch(com.sun.star.uno.Exception e) {
}
// just to make sure, it's the right one.
log.println("TestObject: " + utils.getImplName(xManager));
tEnv = new TestEnvironment(xManager);
// create a configuration storage
try {
XStorage xSubStorage = null;
Object o = (XInterface)xMSF.createInstance("com.sun.star.embed.StorageFactory");
XSingleServiceFactory xSSF = (XSingleServiceFactory)UnoRuntime.queryInterface(
XSingleServiceFactory.class, o);
Object[] props = new Object[2];
props[0] = util.utils.getFullTestURL("delete.cfg");
props[1] = new Integer(ElementModes.ELEMENT_READWRITE);
XStorage xRootStorage = (XStorage)UnoRuntime.queryInterface(XStorage.class, xSSF.createInstanceWithArguments(props));
xSubStorage = xRootStorage.openStorageElement("Configurations2", ElementModes.ELEMENT_READWRITE);
XUIConfigurationStorage xConfigStorage =(XUIConfigurationStorage)UnoRuntime.queryInterface(XUIConfigurationStorage.class, xManager);
xConfigStorage.setStorage(xSubStorage);
tEnv.addObjRelation("XUIConfigurationStorage.Storage", xSubStorage);
}
catch(com.sun.star.uno.Exception e) {
log.println("Could not create storage.");
}
util.dbg.printInterfaces(xManager);
tEnv.addObjRelation("XUIConfiguration.XUIConfigurationListenerImpl",
new ConfigurationListener(log, xManager, xMSF));
return tEnv;
}
/**
* An implementation of the _XUIConfiguration.XUIConfigurationListenerImpl
* interface to trigger the event for a listener call.
* @see ifc.ui._XUIConfiguration
*/
public static class ConfigurationListener implements _XUIConfiguration.XUIConfigurationListenerImpl {
private boolean triggered = false;
private PrintWriter log = null;
private XUIConfigurationManager xUIManager = null;
private XMultiServiceFactory xMSF = null;
private static int iUniqueCounter;
public ConfigurationListener(PrintWriter _log, XUIConfigurationManager xUIManager, XMultiServiceFactory xMSF) {
log = _log;
this.xUIManager = xUIManager;
this.xMSF = xMSF;
iUniqueCounter = 0;
}
public void reset(){
triggered = false;
}
public void fireEvent() {
try {
if (iUniqueCounter == 0) {
iUniqueCounter++;
PropertyValue[][]props = xUIManager.getUIElementsInfo(UIElementType.UNKNOWN);
XIndexAccess xMenuBarSettings = xUIManager.getSettings(
"private:resource/menubar/menubar", true);
PropertyValue[]prop = _XUIConfigurationManager.createMenuBarEntry(
"Trigger Event", xMenuBarSettings, xMSF, log);
_XUIConfigurationManager.createMenuBarItem("Click for Macro",
(XIndexContainer)UnoRuntime.queryInterface(
XIndexContainer.class, prop[3].Value), log);
XIndexContainer x = (XIndexContainer)UnoRuntime.queryInterface(XIndexContainer.class, xMenuBarSettings);
x.insertByIndex(x.getCount(), prop);
xUIManager.replaceSettings("private:resource/menubar/menubar", xMenuBarSettings);
xUIManager.reset();
}
}
catch(com.sun.star.container.NoSuchElementException e) {
log.println("_XUIConfiguration.XUIConfigurationListenerImpl: Exception.");
e.printStackTrace(log);
}
catch(com.sun.star.lang.IllegalArgumentException e) {
log.println("_XUIConfiguration.XUIConfigurationListenerImpl: Exception.");
e.printStackTrace(log);
}
catch(com.sun.star.lang.IllegalAccessException e) {
log.println("_XUIConfiguration.XUIConfigurationListenerImpl: Exception.");
e.printStackTrace(log);
}
catch(com.sun.star.lang.IndexOutOfBoundsException e) {
log.println("_XUIConfiguration.XUIConfigurationListenerImpl: Exception.");
e.printStackTrace(log);
}
catch(com.sun.star.lang.WrappedTargetException e) {
log.println("_XUIConfiguration.XUIConfigurationListenerImpl: Exception.");
e.printStackTrace(log);
}
}
public boolean actionWasTriggered(){
return triggered;
}
public void disposing(EventObject e) {
triggered = true;
log.println("_XUIConfiguration.XUIConfigurationListenerImpl.disposing the listener.");
}
public void elementInserted(ConfigurationEvent configEvent) {
triggered = true;
log.println("_XUIConfiguration.XUIConfigurationListenerImpl.elementInserted.");
}
public void elementRemoved(ConfigurationEvent configEvent) {
triggered = true;
log.println("_XUIConfiguration.XUIConfigurationListenerImpl.elementRemoved.");
}
public void elementReplaced(ConfigurationEvent configEvent) {
triggered = true;
log.println("_XUIConfiguration.XUIConfigurationListenerImpl.elementReplaced.");
}
}
}
|
qadevOOo/tests/java/mod/_fwk/UIConfigurationManager.java
|
INTEGRATION: CWS layoutmanager (1.1.2); FILE ADDED
2004/02/20 13:39:05 sg 1.1.2.3: #i25017#CHG: changed hard file path to tdoc path
2004/02/19 15:54:59 sg 1.1.2.2: #i25017#CHG: enhanced tests
2004/02/19 09:05:26 sg 1.1.2.1: #i25017#NEW: initial version
|
qadevOOo/tests/java/mod/_fwk/UIConfigurationManager.java
|
INTEGRATION: CWS layoutmanager (1.1.2); FILE ADDED 2004/02/20 13:39:05 sg 1.1.2.3: #i25017#CHG: changed hard file path to tdoc path 2004/02/19 15:54:59 sg 1.1.2.2: #i25017#CHG: enhanced tests 2004/02/19 09:05:26 sg 1.1.2.1: #i25017#NEW: initial version
|
|
Java
|
agpl-3.0
|
5f21efb1cd94ffb4caee977f4204317e64045586
| 0
|
isokissa3/mcMMO,EvilOlaf/mcMMO
|
package com.gmail.nossr50.datatypes.player;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.DelayQueue;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.config.Config;
import com.gmail.nossr50.config.experience.ExperienceConfig;
import com.gmail.nossr50.datatypes.MobHealthbarType;
import com.gmail.nossr50.datatypes.experience.FormulaType;
import com.gmail.nossr50.datatypes.experience.SkillXpGain;
import com.gmail.nossr50.datatypes.skills.AbilityType;
import com.gmail.nossr50.datatypes.skills.SkillType;
import com.gmail.nossr50.runnables.player.PlayerProfileSaveTask;
import com.gmail.nossr50.skills.child.FamilyTree;
import com.gmail.nossr50.util.player.UserManager;
import com.google.common.collect.ImmutableMap;
public class PlayerProfile {
private final String playerName;
private UUID uuid;
private boolean loaded;
private volatile boolean changed;
/* HUDs */
private MobHealthbarType mobHealthbarType;
private int scoreboardTipsShown;
/* Skill Data */
private final Map<SkillType, Integer> skills = new HashMap<SkillType, Integer>(); // Skill & Level
private final Map<SkillType, Float> skillsXp = new HashMap<SkillType, Float>(); // Skill & XP
private final Map<AbilityType, Integer> abilityDATS = new HashMap<AbilityType, Integer>(); // Ability & Cooldown
// Store previous XP gains for deminished returns
private DelayQueue<SkillXpGain> gainedSkillsXp = new DelayQueue<SkillXpGain>();
private HashMap<SkillType, Float> rollingSkillsXp = new HashMap<SkillType, Float>();
@Deprecated
public PlayerProfile(String playerName) {
this(playerName, null);
}
public PlayerProfile(String playerName, UUID uuid) {
this.uuid = uuid;
this.playerName = playerName;
mobHealthbarType = Config.getInstance().getMobHealthbarDefault();
scoreboardTipsShown = 0;
for (AbilityType abilityType : AbilityType.values()) {
abilityDATS.put(abilityType, 0);
}
for (SkillType skillType : SkillType.NON_CHILD_SKILLS) {
skills.put(skillType, 0);
skillsXp.put(skillType, 0F);
}
}
@Deprecated
public PlayerProfile(String playerName, boolean isLoaded) {
this(playerName);
this.loaded = isLoaded;
}
public PlayerProfile(String playerName, UUID uuid, boolean isLoaded) {
this(playerName, uuid);
this.loaded = isLoaded;
}
public PlayerProfile(String playerName, UUID uuid, Map<SkillType, Integer> levelData, Map<SkillType, Float> xpData, Map<AbilityType, Integer> cooldownData, MobHealthbarType mobHealthbarType, int scoreboardTipsShown) {
this.playerName = playerName;
this.uuid = uuid;
this.mobHealthbarType = mobHealthbarType;
this.scoreboardTipsShown = scoreboardTipsShown;
skills.putAll(levelData);
skillsXp.putAll(xpData);
abilityDATS.putAll(cooldownData);
loaded = true;
}
public void scheduleAsyncSave() {
new PlayerProfileSaveTask(this).runTaskAsynchronously(mcMMO.p);
}
public void save() {
if (!changed || !loaded) {
return;
}
// TODO should this part be synchronized?
PlayerProfile profileCopy = new PlayerProfile(playerName, uuid, ImmutableMap.copyOf(skills), ImmutableMap.copyOf(skillsXp), ImmutableMap.copyOf(abilityDATS), mobHealthbarType, scoreboardTipsShown);
changed = !mcMMO.getDatabaseManager().saveUser(profileCopy);
if (changed) {
mcMMO.p.getLogger().warning("PlayerProfile saving failed for player: " + playerName + " " + uuid);
}
}
public String getPlayerName() {
return playerName;
}
public UUID getUniqueId() {
return uuid;
}
public void setUniqueId(UUID uuid) {
changed = true;
this.uuid = uuid;
}
public boolean isLoaded() {
return loaded;
}
/*
* Mob Healthbars
*/
public MobHealthbarType getMobHealthbarType() {
return mobHealthbarType;
}
public void setMobHealthbarType(MobHealthbarType mobHealthbarType) {
changed = true;
this.mobHealthbarType = mobHealthbarType;
}
public int getScoreboardTipsShown() {
return scoreboardTipsShown;
}
public void setScoreboardTipsShown(int scoreboardTipsShown) {
changed = true;
this.scoreboardTipsShown = scoreboardTipsShown;
}
public void increaseTipsShown() {
setScoreboardTipsShown(getScoreboardTipsShown() + 1);
}
/*
* Cooldowns
*/
/**
* Get the current deactivation timestamp of an ability.
*
* @param ability The {@link AbilityType} to get the DATS for
* @return the deactivation timestamp for the ability
*/
public long getAbilityDATS(AbilityType ability) {
return abilityDATS.get(ability);
}
/**
* Set the current deactivation timestamp of an ability.
*
* @param ability The {@link AbilityType} to set the DATS for
* @param DATS the DATS of the ability
*/
protected void setAbilityDATS(AbilityType ability, long DATS) {
changed = true;
abilityDATS.put(ability, (int) (DATS * .001D));
}
/**
* Reset all ability cooldowns.
*/
protected void resetCooldowns() {
changed = true;
for (AbilityType ability : abilityDATS.keySet()) {
abilityDATS.put(ability, 0);
}
}
/*
* Xp Functions
*/
public int getSkillLevel(SkillType skill) {
return skill.isChildSkill() ? getChildSkillLevel(skill) : skills.get(skill);
}
public float getSkillXpLevelRaw(SkillType skill) {
return skillsXp.get(skill);
}
public int getSkillXpLevel(SkillType skill) {
return (int) Math.floor(getSkillXpLevelRaw(skill));
}
public void setSkillXpLevel(SkillType skill, float xpLevel) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, xpLevel);
}
protected float levelUp(SkillType skill) {
float xpRemoved = getXpToLevel(skill);
changed = true;
skills.put(skill, skills.get(skill) + 1);
skillsXp.put(skill, skillsXp.get(skill) - xpRemoved);
return xpRemoved;
}
/**
* Remove Xp from a skill.
*
* @param skill Type of skill to modify
* @param xp Amount of xp to remove
*/
public void removeXp(SkillType skill, int xp) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, skillsXp.get(skill) - xp);
}
public void removeXp(SkillType skill, float xp) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, skillsXp.get(skill) - xp);
}
/**
* Modify a skill level.
*
* @param skill Type of skill to modify
* @param level New level value for the skill
*/
public void modifySkill(SkillType skill, int level) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skills.put(skill, level);
skillsXp.put(skill, 0F);
}
/**
* Add levels to a skill.
*
* @param skill Type of skill to add levels to
* @param levels Number of levels to add
*/
public void addLevels(SkillType skill, int levels) {
modifySkill(skill, skills.get(skill) + levels);
}
/**
* Add Experience to a skill.
*
* @param skill Type of skill to add experience to
* @param xp Number of experience to add
*/
public void addXp(SkillType skill, float xp) {
changed = true;
if (skill.isChildSkill()) {
Set<SkillType> parentSkills = FamilyTree.getParents(skill);
float dividedXP = (xp / parentSkills.size());
for (SkillType parentSkill : parentSkills) {
skillsXp.put(parentSkill, skillsXp.get(parentSkill) + dividedXP);
}
}
else {
skillsXp.put(skill, skillsXp.get(skill) + xp);
}
}
/**
* Get the registered amount of experience gained
* This is used for diminished XP returns
*
* @return xp Experience amount registered
*/
public float getRegisteredXpGain(SkillType skillType) {
float xp = 0F;
if (rollingSkillsXp.get(skillType) != null) {
xp = rollingSkillsXp.get(skillType);
}
return xp;
}
/**
* Register an experience gain
* This is used for diminished XP returns
*
* @param skillType Skill being used
* @param xp Experience amount to add
*/
public void registerXpGain(SkillType skillType, float xp) {
gainedSkillsXp.add(new SkillXpGain(skillType, xp));
rollingSkillsXp.put(skillType, getRegisteredXpGain(skillType) + xp);
}
/**
* Remove experience gains older than a given time
* This is used for diminished XP returns
*/
public void purgeExpiredXpGains() {
SkillXpGain gain;
while ((gain = gainedSkillsXp.poll()) != null) {
rollingSkillsXp.put(gain.getSkill(), getRegisteredXpGain(gain.getSkill()) - gain.getXp());
}
}
/**
* Get the amount of Xp remaining before the next level.
*
* @param skillType Type of skill to check
* @return the total amount of Xp until next level
*/
public int getXpToLevel(SkillType skillType) {
int level = (ExperienceConfig.getInstance().getCumulativeCurveEnabled()) ? UserManager.getPlayer(playerName).getPowerLevel() : skills.get(skillType);
FormulaType formulaType = ExperienceConfig.getInstance().getFormulaType();
return mcMMO.getFormulaManager().getCachedXpToLevel(level, formulaType);
}
private int getChildSkillLevel(SkillType skillType) {
Set<SkillType> parents = FamilyTree.getParents(skillType);
int sum = 0;
for (SkillType parent : parents) {
sum += Math.min(getSkillLevel(parent), parent.getMaxLevel());
}
return sum / parents.size();
}
}
|
src/main/java/com/gmail/nossr50/datatypes/player/PlayerProfile.java
|
package com.gmail.nossr50.datatypes.player;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.DelayQueue;
import com.gmail.nossr50.mcMMO;
import com.gmail.nossr50.config.Config;
import com.gmail.nossr50.config.experience.ExperienceConfig;
import com.gmail.nossr50.datatypes.MobHealthbarType;
import com.gmail.nossr50.datatypes.experience.FormulaType;
import com.gmail.nossr50.datatypes.experience.SkillXpGain;
import com.gmail.nossr50.datatypes.skills.AbilityType;
import com.gmail.nossr50.datatypes.skills.SkillType;
import com.gmail.nossr50.runnables.player.PlayerProfileSaveTask;
import com.gmail.nossr50.skills.child.FamilyTree;
import com.gmail.nossr50.util.player.UserManager;
import com.google.common.collect.ImmutableMap;
public class PlayerProfile {
private final String playerName;
private UUID uuid;
private boolean loaded;
private volatile boolean changed;
/* HUDs */
private MobHealthbarType mobHealthbarType;
private int scoreboardTipsShown;
/* Skill Data */
private final Map<SkillType, Integer> skills = new HashMap<SkillType, Integer>(); // Skill & Level
private final Map<SkillType, Float> skillsXp = new HashMap<SkillType, Float>(); // Skill & XP
private final Map<AbilityType, Integer> abilityDATS = new HashMap<AbilityType, Integer>(); // Ability & Cooldown
// Store previous XP gains for deminished returns
private DelayQueue<SkillXpGain> gainedSkillsXp = new DelayQueue<SkillXpGain>();
private HashMap<SkillType, Float> rollingSkillsXp = new HashMap<SkillType, Float>();
@Deprecated
public PlayerProfile(String playerName) {
this(playerName, null);
}
public PlayerProfile(String playerName, UUID uuid) {
this.uuid = uuid;
this.playerName = playerName;
mobHealthbarType = Config.getInstance().getMobHealthbarDefault();
scoreboardTipsShown = 0;
for (AbilityType abilityType : AbilityType.values()) {
abilityDATS.put(abilityType, 0);
}
for (SkillType skillType : SkillType.NON_CHILD_SKILLS) {
skills.put(skillType, 0);
skillsXp.put(skillType, 0F);
}
}
@Deprecated
public PlayerProfile(String playerName, boolean isLoaded) {
this(playerName);
this.loaded = isLoaded;
}
public PlayerProfile(String playerName, UUID uuid, boolean isLoaded) {
this(playerName, uuid);
this.loaded = isLoaded;
}
public PlayerProfile(String playerName, UUID uuid, Map<SkillType, Integer> levelData, Map<SkillType, Float> xpData, Map<AbilityType, Integer> cooldownData, MobHealthbarType mobHealthbarType, int scoreboardTipsShown) {
this.playerName = playerName;
this.uuid = uuid;
this.mobHealthbarType = mobHealthbarType;
this.scoreboardTipsShown = scoreboardTipsShown;
skills.putAll(levelData);
skillsXp.putAll(xpData);
abilityDATS.putAll(cooldownData);
loaded = true;
}
public void scheduleAsyncSave() {
new PlayerProfileSaveTask(this).runTaskAsynchronously(mcMMO.p);
}
public void save() {
if (!changed || !loaded) {
return;
}
// TODO should this part be synchronized?
PlayerProfile profileCopy = new PlayerProfile(playerName, uuid, ImmutableMap.copyOf(skills), ImmutableMap.copyOf(skillsXp), ImmutableMap.copyOf(abilityDATS), mobHealthbarType, scoreboardTipsShown);
changed = !mcMMO.getDatabaseManager().saveUser(profileCopy);
if (changed) {
mcMMO.p.getLogger().warning("PlayerProfile saving failed for player: " + playerName + " " + uuid);
}
}
public String getPlayerName() {
return playerName;
}
public UUID getUniqueId() {
return uuid;
}
public void setUniqueId(UUID uuid) {
changed = true;
this.uuid = uuid;
}
public boolean isLoaded() {
return loaded;
}
/*
* Mob Healthbars
*/
public MobHealthbarType getMobHealthbarType() {
return mobHealthbarType;
}
public void setMobHealthbarType(MobHealthbarType mobHealthbarType) {
changed = true;
this.mobHealthbarType = mobHealthbarType;
}
public int getScoreboardTipsShown() {
return scoreboardTipsShown;
}
public void setScoreboardTipsShown(int scoreboardTipsShown) {
changed = true;
this.scoreboardTipsShown = scoreboardTipsShown;
}
public void increaseTipsShown() {
setScoreboardTipsShown(getScoreboardTipsShown() + 1);
}
/*
* Cooldowns
*/
/**
* Get the current deactivation timestamp of an ability.
*
* @param ability The {@link AbilityType} to get the DATS for
* @return the deactivation timestamp for the ability
*/
public long getAbilityDATS(AbilityType ability) {
return abilityDATS.get(ability);
}
/**
* Set the current deactivation timestamp of an ability.
*
* @param ability The {@link AbilityType} to set the DATS for
* @param DATS the DATS of the ability
*/
protected void setAbilityDATS(AbilityType ability, long DATS) {
changed = true;
abilityDATS.put(ability, (int) (DATS * .001D));
}
/**
* Reset all ability cooldowns.
*/
protected void resetCooldowns() {
changed = true;
for (AbilityType ability : abilityDATS.keySet()) {
abilityDATS.put(ability, 0);
}
}
/*
* Xp Functions
*/
public int getSkillLevel(SkillType skill) {
return skill.isChildSkill() ? getChildSkillLevel(skill) : skills.get(skill);
}
public float getSkillXpLevelRaw(SkillType skill) {
return skillsXp.get(skill);
}
public int getSkillXpLevel(SkillType skill) {
return (int) Math.floor(getSkillXpLevelRaw(skill));
}
public void setSkillXpLevel(SkillType skill, float xpLevel) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, xpLevel);
}
protected float levelUp(SkillType skill) {
float xpRemoved = getXpToLevel(skill);
changed = true;
skills.put(skill, skills.get(skill) + 1);
skillsXp.put(skill, skillsXp.get(skill) - xpRemoved);
return xpRemoved;
}
/**
* Remove Xp from a skill.
*
* @param skill Type of skill to modify
* @param xp Amount of xp to remove
*/
public void removeXp(SkillType skill, int xp) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, skillsXp.get(skill) - xp);
}
public void removeXp(SkillType skill, float xp) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skillsXp.put(skill, skillsXp.get(skill) - xp);
}
/**
* Modify a skill level.
*
* @param skill Type of skill to modify
* @param level New level value for the skill
*/
public void modifySkill(SkillType skill, int level) {
if (skill.isChildSkill()) {
return;
}
changed = true;
skills.put(skill, level);
skillsXp.put(skill, 0F);
}
/**
* Add levels to a skill.
*
* @param skill Type of skill to add levels to
* @param levels Number of levels to add
*/
public void addLevels(SkillType skill, int levels) {
modifySkill(skill, skills.get(skill) + levels);
}
/**
* Add Experience to a skill.
*
* @param skill Type of skill to add experience to
* @param xp Number of experience to add
*/
public void addXp(SkillType skill, float xp) {
changed = true;
if (skill.isChildSkill()) {
Set<SkillType> parentSkills = FamilyTree.getParents(skill);
float dividedXP = (xp / parentSkills.size());
for (SkillType parentSkill : parentSkills) {
skillsXp.put(parentSkill, skillsXp.get(parentSkill) + dividedXP);
}
}
else {
skillsXp.put(skill, skillsXp.get(skill) + xp);
}
}
/**
* Get the registered amount of experience gained
* This is used for diminished XP returns
*
* @return xp Experience amount registered
*/
public float getRegisteredXpGain(SkillType skillType) {
float xp = 0F;
if (rollingSkillsXp.get(skillType) != null) {
xp = rollingSkillsXp.get(skillType);
}
return xp;
}
/**
* Register an experience gain
* This is used for diminished XP returns
*
* @param skillType Skill being used
* @param xp Experience amount to add
*/
public void registerXpGain(SkillType skillType, float xp) {
gainedSkillsXp.add(new SkillXpGain(skillType, xp));
rollingSkillsXp.put(skillType, getRegisteredXpGain(skillType) + xp);
}
/**
* Remove experience gains older than a given time
* This is used for diminished XP returns
*/
public void purgeExpiredXpGains() {
SkillXpGain gain;
while ((gain = gainedSkillsXp.poll()) != null) {
rollingSkillsXp.put(gain.getSkill(), getRegisteredXpGain(gain.getSkill()) - gain.getXp());
}
}
/**
* Get the amount of Xp remaining before the next level.
*
* @param skillType Type of skill to check
* @return the total amount of Xp until next level
*/
public int getXpToLevel(SkillType skillType) {
int level = (ExperienceConfig.getInstance().getCumulativeCurveEnabled()) ? UserManager.getPlayer(playerName).getPowerLevel() : skills.get(skillType);
FormulaType formulaType = ExperienceConfig.getInstance().getFormulaType();
return mcMMO.getFormulaManager().getCachedXpToLevel(level, formulaType);
}
private int getChildSkillLevel(SkillType skillType) {
Set<SkillType> parents = FamilyTree.getParents(skillType);
int sum = 0;
for (SkillType parent : parents) {
sum += Math.min(getSkillLevel(parent), 1000);
}
return sum / parents.size();
}
}
|
Remove hardcoded limit for parent skill levels when calculating child skill level
|
src/main/java/com/gmail/nossr50/datatypes/player/PlayerProfile.java
|
Remove hardcoded limit for parent skill levels when calculating child skill level
|
|
Java
|
agpl-3.0
|
error: pathspec 'config/KSMetaData.java' did not match any file(s) known to git
|
a5aefb5ef26e28a76b900ced5ad928699f18ced4
| 1
|
duarten/scylla,kjniemi/scylla,shaunstanislaus/scylla,guiquanz/scylla,victorbriz/scylla,scylladb/scylla,phonkee/scylla,senseb/scylla,bowlofstew/scylla,glommer/scylla,respu/scylla,dwdm/scylla,acbellini/scylla,guiquanz/scylla,duarten/scylla,dwdm/scylla,scylladb/scylla,wildinto/scylla,bowlofstew/scylla,gwicke/scylla,asias/scylla,stamhe/scylla,tempbottle/scylla,stamhe/scylla,phonkee/scylla,justintung/scylla,bowlofstew/scylla,tempbottle/scylla,scylladb/scylla,kjniemi/scylla,eklitzke/scylla,gwicke/scylla,aruanruan/scylla,kangkot/scylla,capturePointer/scylla,rluta/scylla,raphaelsc/scylla,duarten/scylla,eklitzke/scylla,linearregression/scylla,scylladb/scylla,rluta/scylla,linearregression/scylla,asias/scylla,justintung/scylla,stamhe/scylla,kangkot/scylla,avikivity/scylla,capturePointer/scylla,acbellini/scylla,tempbottle/scylla,raphaelsc/scylla,capturePointer/scylla,eklitzke/scylla,justintung/scylla,respu/scylla,aruanruan/scylla,wildinto/scylla,kangkot/scylla,victorbriz/scylla,rentongzhang/scylla,senseb/scylla,victorbriz/scylla,kjniemi/scylla,raphaelsc/scylla,glommer/scylla,asias/scylla,phonkee/scylla,rentongzhang/scylla,guiquanz/scylla,rluta/scylla,gwicke/scylla,senseb/scylla,acbellini/scylla,rentongzhang/scylla,shaunstanislaus/scylla,avikivity/scylla,dwdm/scylla,wildinto/scylla,aruanruan/scylla,linearregression/scylla,shaunstanislaus/scylla,glommer/scylla,respu/scylla,avikivity/scylla
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.util.*;
import com.google.common.base.Objects;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.locator.*;
import org.apache.cassandra.service.StorageService;
/**
 * Immutable description of a keyspace: its name, replication strategy class
 * and options, durable-writes flag, the tables (column families) it contains,
 * and its user-defined types.
 */
public final class KSMetaData
{
    public final String name;
    public final Class<? extends AbstractReplicationStrategy> strategyClass;
    public final Map<String, String> strategyOptions;
    public final boolean durableWrites;
    public final UTMetaData userTypes;

    /** Tables keyed by column-family name; exposed read-only via {@link #cfMetaData()}. */
    private final Map<String, CFMetaData> cfMetaData;

    /** Keyspace with no tables and no user-defined types. */
    public KSMetaData(String name,
                      Class<? extends AbstractReplicationStrategy> strategyClass,
                      Map<String, String> strategyOptions,
                      boolean durableWrites)
    {
        this(name, strategyClass, strategyOptions, durableWrites, Collections.<CFMetaData>emptyList(), new UTMetaData());
    }

    /** Keyspace with the given tables and no user-defined types. */
    public KSMetaData(String name,
                      Class<? extends AbstractReplicationStrategy> strategyClass,
                      Map<String, String> strategyOptions,
                      boolean durableWrites,
                      Iterable<CFMetaData> cfDefs)
    {
        this(name, strategyClass, strategyOptions, durableWrites, cfDefs, new UTMetaData());
    }

    private KSMetaData(String name,
                       Class<? extends AbstractReplicationStrategy> strategyClass,
                       Map<String, String> strategyOptions,
                       boolean durableWrites,
                       Iterable<CFMetaData> cfDefs,
                       UTMetaData userTypes)
    {
        this.name = name;
        // A null strategy falls back to NetworkTopologyStrategy.
        this.strategyClass = strategyClass == null ? NetworkTopologyStrategy.class : strategyClass;
        this.strategyOptions = strategyOptions;
        this.durableWrites = durableWrites;
        this.userTypes = userTypes;

        Map<String, CFMetaData> byName = new HashMap<>();
        for (CFMetaData cfm : cfDefs)
            byName.put(cfm.cfName, cfm);
        this.cfMetaData = Collections.unmodifiableMap(byName);
    }

    /**
     * Factory for user-created keyspaces (through CQL). Rejects LocalStrategy,
     * which is reserved for internal keyspaces.
     */
    public static KSMetaData newKeyspace(String name, String strategyName, Map<String, String> options, boolean durableWrites) throws ConfigurationException
    {
        Class<? extends AbstractReplicationStrategy> strategyCls = AbstractReplicationStrategy.getClass(strategyName);
        if (strategyCls.equals(LocalStrategy.class))
            throw new ConfigurationException("Unable to use given strategy class: LocalStrategy is reserved for internal use.");

        return newKeyspace(name, strategyCls, options, durableWrites, Collections.<CFMetaData>emptyList());
    }

    public static KSMetaData newKeyspace(String name, Class<? extends AbstractReplicationStrategy> strategyClass, Map<String, String> options, boolean durablesWrites, Iterable<CFMetaData> cfDefs)
    {
        return new KSMetaData(name, strategyClass, options, durablesWrites, cfDefs, new UTMetaData());
    }

    /** Copy of this keyspace definition without the given table. */
    public KSMetaData cloneWithTableRemoved(CFMetaData table)
    {
        List<CFMetaData> remaining = new ArrayList<>(cfMetaData().values());
        remaining.remove(table);
        assert remaining.size() == cfMetaData().size() - 1;
        return cloneWith(remaining, userTypes);
    }

    /** Copy of this keyspace definition with the given table added. */
    public KSMetaData cloneWithTableAdded(CFMetaData table)
    {
        List<CFMetaData> updated = new ArrayList<>(cfMetaData().values());
        updated.add(table);
        assert updated.size() == cfMetaData().size() + 1;
        return cloneWith(updated, userTypes);
    }

    /** Copy of this keyspace definition with the given tables and user types. */
    public KSMetaData cloneWith(Iterable<CFMetaData> tables, UTMetaData types)
    {
        return new KSMetaData(name, strategyClass, strategyOptions, durableWrites, tables, types);
    }

    /** Test-only factory: durable writes enabled. */
    public static KSMetaData testMetadata(String name, Class<? extends AbstractReplicationStrategy> strategyClass, Map<String, String> strategyOptions, CFMetaData... cfDefs)
    {
        return new KSMetaData(name, strategyClass, strategyOptions, true, Arrays.asList(cfDefs));
    }

    /** Test-only factory: durable writes disabled. */
    public static KSMetaData testMetadataNotDurable(String name, Class<? extends AbstractReplicationStrategy> strategyClass, Map<String, String> strategyOptions, CFMetaData... cfDefs)
    {
        return new KSMetaData(name, strategyClass, strategyOptions, false, Arrays.asList(cfDefs));
    }

    @Override
    public int hashCode()
    {
        // Argument order must stay in sync with equals().
        return Objects.hashCode(name, strategyClass, strategyOptions, cfMetaData, durableWrites, userTypes);
    }

    @Override
    public boolean equals(Object o)
    {
        if (this == o)
            return true;

        if (!(o instanceof KSMetaData))
            return false;

        KSMetaData that = (KSMetaData) o;

        return Objects.equal(name, that.name)
            && Objects.equal(strategyClass, that.strategyClass)
            && Objects.equal(strategyOptions, that.strategyOptions)
            && Objects.equal(cfMetaData, that.cfMetaData)
            && Objects.equal(durableWrites, that.durableWrites)
            && Objects.equal(userTypes, that.userTypes);
    }

    /** @return an unmodifiable view of this keyspace's tables, keyed by name */
    public Map<String, CFMetaData> cfMetaData()
    {
        return cfMetaData;
    }

    @Override
    public String toString()
    {
        return Objects.toStringHelper(this)
                      .add("name", name)
                      .add("strategyClass", strategyClass.getSimpleName())
                      .add("strategyOptions", strategyOptions)
                      .add("cfMetaData", cfMetaData)
                      .add("durableWrites", durableWrites)
                      .add("userTypes", userTypes)
                      .toString();
    }

    /** Convenience: a strategy-options map containing only {@code replication_factor}. */
    public static Map<String,String> optsWithRF(final Integer rf)
    {
        return Collections.singletonMap("replication_factor", rf.toString());
    }

    /**
     * Validates the keyspace name, the replication strategy (and its options)
     * and every contained table.
     *
     * @return this, for chaining
     * @throws ConfigurationException if any part of the definition is invalid
     */
    public KSMetaData validate() throws ConfigurationException
    {
        if (!CFMetaData.isNameValid(name))
            throw new ConfigurationException(String.format("Keyspace name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", Schema.NAME_LENGTH, name));

        // Attempt to instantiate the ARS, which will throw a ConfigException if the strategy_options aren't fully formed
        TokenMetadata tokenMetadata = StorageService.instance.getTokenMetadata();
        IEndpointSnitch snitch = DatabaseDescriptor.getEndpointSnitch();
        AbstractReplicationStrategy.validateReplicationStrategy(name, strategyClass, tokenMetadata, snitch, strategyOptions);

        for (CFMetaData cfm : cfMetaData.values())
            cfm.validate();

        return this;
    }
}
|
config/KSMetaData.java
|
config: Import KSMetaData.java
Signed-off-by: Pekka Enberg <add4fcd06328a394f0ad91feda7ee057316dc5ed@cloudius-systems.com>
|
config/KSMetaData.java
|
config: Import KSMetaData.java
|
|
Java
|
agpl-3.0
|
error: pathspec 'src/main/omr/glyph/ui/TrainingPanel.java' did not match any file(s) known to git
|
eac21b6a36b872f7a9cf235c4b590cc3708708f1
| 1
|
Audiveris/audiveris,Audiveris/audiveris
|
//----------------------------------------------------------------------------//
// //
// T r a i n i n g P a n e l //
// //
// Copyright (C) Herve Bitteur 2000-2006. All rights reserved. //
// This software is released under the terms of the GNU General Public //
// License. Please contact the author at herve.bitteur@laposte.net //
// to report bugs & suggestions. //
//----------------------------------------------------------------------------//
//
package omr.glyph.ui;
import omr.glyph.Evaluator;
import omr.glyph.Glyph;
import omr.glyph.GlyphNetwork;
import omr.glyph.Shape;
import static omr.glyph.Shape.*;
import static omr.glyph.ui.GlyphTrainer.Task.Activity.*;
import omr.math.NeuralNetwork;
import omr.ui.util.Panel;
import omr.util.Implement;
import omr.util.Logger;
import com.jgoodies.forms.builder.*;
import com.jgoodies.forms.layout.*;
import java.awt.event.*;
import java.util.*;
import javax.swing.*;
/**
* Class <code>TrainingPanel</code> is a panel dedicated to the training of an
* evaluator. This class was common to several evaluators, it is now used only
* through its subclass {@link NetworkPanel} to train just the neural network
* evaluator.
*
* @author Hervé Bitteur
* @version $Id$
*/
public class TrainingPanel
    extends Panel
    implements Evaluator.Monitor, Observer
{
    //~ Static fields/initializers ---------------------------------------------

    /** Usual class logger. */
    private static final Logger logger = Logger.getLogger(
        TrainingPanel.class);

    //~ Instance fields --------------------------------------------------------

    /** Current activity (selecting the population, or training the evaluator on
       the selected population) */
    protected final GlyphTrainer.Task task;

    /** User action to launch the training */
    protected TrainAction trainAction;

    /** The underlying evaluator to be trained */
    protected Evaluator evaluator;

    /** User progress bar to visualize the training process */
    protected JProgressBar progressBar = new JProgressBar();

    /** Common JGoodies constraints for this class and its subclass if any */
    protected CellConstraints cst = new CellConstraints();

    /** Common JGoodies builder for this class and its subclass if any */
    protected PanelBuilder builder;

    /** Repository of known glyphs */
    private final GlyphRepository repository = GlyphRepository.getInstance();

    /** Flag to indicate that the whole population of recorded glyphs (and not
       just the core ones) is to be considered */
    private boolean useWhole = true;

    /** UI panel dealing with repository selection */
    private final SelectionPanel selectionPanel;

    /** The Neural Network evaluator */
    private GlyphNetwork network = GlyphNetwork.getInstance();

    //~ Constructors -----------------------------------------------------------

    //---------------//
    // TrainingPanel //
    //---------------//
    /**
     * Creates a new TrainingPanel object.
     *
     * @param task the current training task
     * @param standardWidth standard width for fields & buttons
     * @param evaluator the underlying evaluator to train
     * @param selectionPanel user panel for glyphs selection
     * @param totalRows total number of display rows, interlines not counted
     */
    public TrainingPanel (GlyphTrainer.Task task,
                          String standardWidth,
                          Evaluator evaluator,
                          SelectionPanel selectionPanel,
                          int totalRows)
    {
        this.evaluator = evaluator;
        this.task = task;
        this.selectionPanel = selectionPanel;

        FormLayout layout = Panel.makeFormLayout(
            totalRows,
            4,
            "",
            standardWidth,
            standardWidth);

        builder = new PanelBuilder(layout, this);
        builder.setDefaultDialogBorder(); // Useful ?

        defineLayout();
    }

    //~ Methods ----------------------------------------------------------------

    /** Monitor callback at the end of a training epoch; no UI feedback here. */
    @Implement(NeuralNetwork.Monitor.class)
    public void epochEnded (int epochIndex,
                            double mse)
    {
    }

    /** Monitor callback for each glyph processed; intentionally a no-op. */
    @Implement(Evaluator.Monitor.class)
    public void glyphProcessed (final Glyph glyph)
    {
    }

    /** Monitor callback when training starts; intentionally a no-op. */
    @Implement(NeuralNetwork.Monitor.class)
    public void trainingStarted (final int epochIndex,
                                 final double mse)
    {
    }

    //--------//
    // update //
    //--------//
    /**
     * Observer callback triggered when the task activity changes: the train
     * action is only enabled while the task is INACTIVE.
     *
     * @param obs the observed task (unused, state is read from {@link #task})
     * @param unused not used
     */
    @Implement(Observer.class)
    public void update (Observable obs,
                        Object unused)
    {
        switch (task.getActivity()) {
        case INACTIVE :
            trainAction.setEnabled(true);

            break;

        case SELECTING :
            trainAction.setEnabled(false);

            break;

        case TRAINING :
            trainAction.setEnabled(false);

            break;
        }
    }

    //----------//
    // useWhole //
    //----------//
    /**
     * Tell whether the whole glyph base is to be used, or just the core base
     *
     * @return true if whole, false if core
     */
    public boolean useWhole ()
    {
        return useWhole;
    }

    //--------------//
    // defineLayout //
    //--------------//
    /**
     * Build the common part of the panel layout: the core/whole radio buttons,
     * the "Training" separator and the progress bar.
     */
    protected void defineLayout ()
    {
        // Buttons to select just the core glyphs, or the whole population
        JRadioButton coreButton = new JRadioButton(new CoreAction());
        JRadioButton wholeButton = new JRadioButton(new WholeAction());

        // Group the radio buttons.
        ButtonGroup group = new ButtonGroup();
        group.add(wholeButton);
        wholeButton.setToolTipText("Use the whole glyph base for any action");
        group.add(coreButton);
        coreButton.setToolTipText(
            "Use only the core glyph base for any action");
        // "Whole" is the default selection (matches useWhole = true)
        wholeButton.setSelected(true);

        // Evaluator Title & Progress Bar
        int r = 1; // ----------------------------
        builder.addSeparator("Training", cst.xyw(1, r, 7));
        builder.add(progressBar, cst.xyw(9, r, 7));

        r += 2; // ----------------------------
        builder.add(wholeButton, cst.xy(1, r));

        r += 2; // ----------------------------
        builder.add(coreButton, cst.xy(1, r));
    }

    //-----------------//
    // checkPopulation //
    //-----------------//
    /**
     * Check that all trainable shapes are present in the training population
     * and that only legal shapes are present. If illegal (non trainable)
     * shapes are found, they are removed from the population (the list is
     * modified in place); missing shapes are only logged as warnings.
     *
     * @param glyphs the training population, possibly pruned by this method
     */
    private void checkPopulation (List<Glyph> glyphs)
    {
        // Check that all trainable shapes are present in the training
        // population and that only legal shapes are present. If illegal
        // (non trainable) shapes are found, they are removed from the
        // population.
        // A shape is considered trainable iff its ordinal is <= LastPhysicalShape's
        boolean[] present = new boolean[LastPhysicalShape.ordinal() + 1];
        Arrays.fill(present, false);

        for (Iterator<Glyph> it = glyphs.iterator(); it.hasNext();) {
            Glyph glyph = it.next();
            int   index = glyph.getShape()
                               .ordinal();

            if (index >= present.length) {
                logger.warning(
                    "Removing not trainable shape:" + glyph.getShape());
                it.remove();
            } else {
                present[index] = true;
            }
        }

        for (int i = 0; i < present.length; i++) {
            if (!present[i]) {
                logger.warning("Missing shape: " + Shape.values()[i]);
            }
        }
    }

    //~ Inner Classes ----------------------------------------------------------

    //------------//
    // DumpAction //
    //------------//
    /** Action that dumps the evaluator internals to the log/console. */
    protected class DumpAction
        extends AbstractAction
    {
        public DumpAction ()
        {
            super("Dump");
        }

        @Implement(ActionListener.class)
        public void actionPerformed (ActionEvent e)
        {
            evaluator.dump();
        }
    }

    //-------------//
    // TrainAction //
    //-------------//
    /**
     * Action that launches the evaluator training on a low-priority background
     * thread, after optional user confirmation.
     */
    protected class TrainAction
        extends AbstractAction
    {
        // Specific training starting mode
        protected Evaluator.StartingMode mode = Evaluator.StartingMode.SCRATCH;
        // Whether the user must confirm before retraining from scratch
        protected boolean                confirmationRequired = true;

        public TrainAction (String title)
        {
            super(title);
        }

        @Implement(ActionListener.class)
        public void actionPerformed (ActionEvent e)
        {
            // Ask user confirmation
            if (confirmationRequired) {
                int answer = JOptionPane.showConfirmDialog(
                    TrainingPanel.this,
                    "Do you really want to retrain from scratch ?");

                if (answer != JOptionPane.YES_OPTION) {
                    return;
                }
            }

            // Run the (potentially long) training off the EDT
            class Worker
                extends Thread
            {
                public void run ()
                {
                    train();
                }
            }

            Worker worker = new Worker();
            worker.setPriority(Thread.MIN_PRIORITY);
            worker.start();
        }

        //-------//
        // train //
        //-------//
        /**
         * Load the selected glyph population from the repository, prune it,
         * then train the evaluator. Runs on the worker thread.
         */
        public void train ()
        {
            task.setActivity(TRAINING);

            Collection<String> gNames = selectionPanel.getBase(useWhole);
            progressBar.setValue(0);
            // NOTE(review): the progress maximum always comes from the neural
            // network's epoch count, even if 'evaluator' is a different kind —
            // confirm this is intended
            progressBar.setMaximum(network.getListEpochs());

            List<Glyph> glyphs = new ArrayList<Glyph>();

            for (String gName : gNames) {
                glyphs.add(repository.getGlyph(gName));
            }

            // Check that all trainable shapes (and only those ones) are
            // present in the training population
            checkPopulation(glyphs);

            evaluator.train(glyphs, TrainingPanel.this, mode);
            task.setActivity(INACTIVE);
        }
    }

    //------------//
    // CoreAction //
    //------------//
    /** Radio-button action selecting the core glyph base only. */
    private class CoreAction
        extends AbstractAction
    {
        public CoreAction ()
        {
            super("Core");
        }

        @Implement(ActionListener.class)
        public void actionPerformed (ActionEvent e)
        {
            useWhole = false;
        }
    }

    //-------------//
    // WholeAction //
    //-------------//
    /** Radio-button action selecting the whole glyph base. */
    private class WholeAction
        extends AbstractAction
    {
        public WholeAction ()
        {
            super("Whole");
        }

        @Implement(ActionListener.class)
        public void actionPerformed (ActionEvent e)
        {
            useWhole = true;
        }
    }
}
|
src/main/omr/glyph/ui/TrainingPanel.java
|
User panel to handle the general training of an evaluator
|
src/main/omr/glyph/ui/TrainingPanel.java
|
User panel to handle the general training of an evaluator
|
|
Java
|
agpl-3.0
|
error: pathspec 'src/java/nl/b3p/viewer/stripes/FeatureInfoActionBean.java' did not match any file(s) known to git
|
d9f4e75d5cdd62fb136a2ed7fbb93ae5955fbeb3
| 1
|
flamingo-geocms/flamingo,flamingo-geocms/flamingo,flamingo-geocms/flamingo,B3Partners/flamingo,B3Partners/flamingo,flamingo-geocms/flamingo,B3Partners/flamingo,mvdstruijk/flamingo,mvdstruijk/flamingo,B3Partners/flamingo,mvdstruijk/flamingo,mvdstruijk/flamingo
|
/*
* Copyright (C) 2012 B3Partners B.V.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package nl.b3p.viewer.stripes;
import java.io.IOException;
import java.io.StringReader;
import java.util.*;
import net.sourceforge.stripes.action.*;
import net.sourceforge.stripes.validation.Validate;
import nl.b3p.geotools.filter.visitor.RemoveDistanceUnit;
import nl.b3p.viewer.config.app.ApplicationLayer;
import nl.b3p.viewer.config.app.ConfiguredAttribute;
import nl.b3p.viewer.config.services.AttributeDescriptor;
import nl.b3p.viewer.config.services.GeoService;
import nl.b3p.viewer.config.services.Layer;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.geotools.data.FeatureSource;
import org.geotools.data.Query;
import org.geotools.factory.CommonFactoryFinder;
import org.geotools.feature.FeatureIterator;
import org.geotools.filter.text.cql2.CQL;
import org.geotools.filter.visitor.DuplicatingFilterVisitor;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.filter.Filter;
import org.opengis.filter.FilterVisitor;
import org.opengis.filter.spatial.DWithin;
import org.stripesstuff.stripersist.Stripersist;
/**
*
* @author Matthijs Laan
*/
@UrlBinding("/action/featureinfo")
@StrictBinding
/**
 * Stripes action bean answering feature-info requests: for each query object
 * in {@code queryJSON} it opens the layer's feature source and returns (as
 * JSON) the attributes of the features found within {@code distance} meters
 * of the point ({@code x},{@code y}).
 *
 * @author Matthijs Laan
 */
@UrlBinding("/action/featureinfo")
@StrictBinding
public class FeatureInfoActionBean implements ActionBean {
    private static final Log log = LogFactory.getLog(FeatureInfoActionBean.class);

    private ActionBeanContext context;

    /** Maximum number of features returned per query. */
    @Validate
    private int limit = 10;

    /** X ordinate of the clicked point. */
    @Validate
    private double x;

    /** Y ordinate of the clicked point. */
    @Validate
    private double y;

    /** Search radius around the point, in meters. */
    @Validate
    private double distance;

    /** JSON array of query objects (appLayer or service/layer, optional filter). */
    @Validate
    private String queryJSON;

    //<editor-fold defaultstate="collapsed" desc="getters and setters">
    public ActionBeanContext getContext() {
        return context;
    }

    public void setContext(ActionBeanContext context) {
        this.context = context;
    }

    public int getLimit() {
        return limit;
    }

    public void setLimit(int limit) {
        this.limit = limit;
    }

    public double getDistance() {
        return distance;
    }

    public void setDistance(double distance) {
        this.distance = distance;
    }

    public String getQueryJSON() {
        return queryJSON;
    }

    public void setQueryJSON(String queryJSON) {
        this.queryJSON = queryJSON;
    }

    public double getX() {
        return x;
    }

    public void setX(double x) {
        this.x = x;
    }

    public double getY() {
        return y;
    }

    public void setY(double y) {
        this.y = y;
    }
    //</editor-fold>

    /**
     * Restricts the query to the attributes configured as visible on the
     * application layer.
     *
     * @param appLayer the application layer whose attribute configuration is used
     * @param q the query to restrict (only modified when some attributes are invisible)
     * @return the names of the visible attributes
     */
    private List<String> setPropertyNames(ApplicationLayer appLayer, Query q) {
        List<String> propertyNames = new ArrayList<String>();
        boolean haveInvisibleProperties = false;
        for(ConfiguredAttribute ca: appLayer.getAttributes()) {
            if(ca.isVisible()) {
                propertyNames.add(ca.getAttributeName());
            } else {
                haveInvisibleProperties = true;
            }
        }
        if(haveInvisibleProperties) {
            // By default Query retrieves Query.ALL_NAMES
            // Query.NO_NAMES is an empty String array
            q.setPropertyNames(propertyNames);
        }
        return propertyNames;
    }

    /**
     * Handles the feature-info request. Each entry of {@code queryJSON}
     * produces one response object containing the original request, the
     * matching features, and/or an error description.
     *
     * @return a JSON resolution with one response per query
     * @throws JSONException if {@code queryJSON} cannot be parsed
     */
    public Resolution info() throws JSONException {
        JSONArray queries = new JSONArray(queryJSON);
        JSONArray responses = new JSONArray();

        for(int i = 0; i < queries.length(); i++) {
            JSONObject query = queries.getJSONObject(i);
            JSONObject response = new JSONObject();
            responses.put(response);
            response.put("request", query);

            String error = null;
            String exceptionMsg = query.toString();
            try {
                ApplicationLayer al = null;
                GeoService gs = null;
                if(query.has("appLayer")) {
                    al = Stripersist.getEntityManager().find(ApplicationLayer.class, query.getLong("appLayer"));
                } else {
                    gs = Stripersist.getEntityManager().find(GeoService.class, query.getLong("service"));
                }

                // do/while(false) so error cases can bail out with "break"
                do {
                    if(al == null && gs == null) {
                        error = "App layer or service not found";
                        break;
                    }

                    Layer l = null;
                    if(al != null) {
                        l = al.getService().getLayer(al.getLayerName());
                    } else {
                        l = gs.getLayer(query.getString("layer"));
                    }
                    if(l == null) {
                        error = "Layer not found";
                        break;
                    }

                    if(l.getFeatureType() == null) {
                        error = "Layer has no feature type";
                        // FIX: without this break the code below dereferenced
                        // l.getFeatureType() and threw a NullPointerException
                        break;
                    }

                    String filter = query.optString("filter", null);

                    FeatureSource fs = l.getFeatureType().openGeoToolsFeatureSource();

                    Query q = new Query(fs.getName().toString());

                    List<String> propertyNames;
                    if(al != null) {
                        propertyNames = setPropertyNames(al, q);
                    } else {
                        propertyNames = new ArrayList<String>();
                        for(AttributeDescriptor ad: l.getFeatureType().getAttributes()) {
                            propertyNames.add(ad.getName());
                        }
                    }

                    String geomAttribute = fs.getSchema().getGeometryDescriptor().getLocalName();
                    // FIX: force Locale.US so doubles are formatted with a '.'
                    // decimal separator; the default locale can produce commas,
                    // which breaks the CQL expression
                    String dwithin = String.format(Locale.US, "DWITHIN(\"%s\", POINT(%f %f), %f, meters)",
                            geomAttribute,
                            x,
                            y,
                            distance);
                    filter = filter != null ? "(" + dwithin + ") AND (" + filter + ")" : dwithin;

                    Filter f = CQL.toFilter(filter);

                    // GeoTools' DWithin evaluation ignores the unit argument, strip it
                    f = (Filter)f.accept(new RemoveDistanceUnit(), null);

                    q.setFilter(f);
                    q.setMaxFeatures(limit);

                    JSONArray features = getJSONFeatures(fs, q, propertyNames);
                    response.put("features", features);
                } while(false);
            } catch(Exception e) {
                log.error("Exception loading feature info for " + exceptionMsg, e);
                error = "Exception: " + e.toString();
            } finally {
                if(error != null) {
                    response.put("error", error);
                }
            }
        }

        return new StreamingResolution("application/json", new StringReader(responses.toString(4)));
    }

    /**
     * Executes the query and converts the resulting features to a JSON array
     * of attribute maps (always including the feature id under "id").
     * The iterator and the backing data store are closed/disposed afterwards.
     *
     * @param fs the feature source to query
     * @param q the query to execute
     * @param propertyNames the attributes to include per feature
     * @return one JSON object per feature
     * @throws IOException on data access problems
     * @throws JSONException on JSON construction problems
     */
    private static JSONArray getJSONFeatures(FeatureSource fs, Query q, List<String> propertyNames) throws IOException, JSONException {
        FeatureIterator<SimpleFeature> it = fs.getFeatures(q).features();
        JSONArray features = new JSONArray();
        try {
            while(it.hasNext()) {
                SimpleFeature f = it.next();
                JSONObject j = new JSONObject();
                j.put("id", f.getID());
                for(String name: propertyNames) {
                    j.put(name, f.getAttribute(name));
                }
                features.put(j);
            }
            return features;
        } finally {
            it.close();
            fs.getDataStore().dispose();
        }
    }
}
|
src/java/nl/b3p/viewer/stripes/FeatureInfoActionBean.java
|
added feature info actionbean
|
src/java/nl/b3p/viewer/stripes/FeatureInfoActionBean.java
|
added feature info actionbean
|
|
Java
|
agpl-3.0
|
error: pathspec 'app/src/main/java/com/garethevans/church/opensongtablet/SAFRead.java' did not match any file(s) known to git
|
585bf2d8a3b17963680803972ab782650a87dcd3
| 1
|
thebigg73/OpenSongTablet,thebigg73/OpenSongTablet,thebigg73/OpenSongTablet
|
// This file will be used to read in xml files using Android's Storage Access Framework
|
app/src/main/java/com/garethevans/church/opensongtablet/SAFRead.java
|
Create SAFRead.java
|
app/src/main/java/com/garethevans/church/opensongtablet/SAFRead.java
|
Create SAFRead.java
|
|
Java
|
lgpl-2.1
|
error: pathspec 'src/dr/evoxml/MicrosatellitePatternParser.java' did not match any file(s) known to git
|
6f45325359ba6d40064ad8d4492d37dc62d9b500
| 1
|
svn2github/beast-mcmc,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,svn2github/beast-mcmc,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox,svn2github/beast-mcmc,armanbilge/BEAST_sandbox,armanbilge/BEAST_sandbox
|
package dr.evoxml;
import dr.xml.*;
import dr.evolution.util.Taxa;
import dr.evolution.datatype.Microsatellite;
import dr.evolution.alignment.Patterns;
import java.util.logging.Logger;
/**
* @author Chieh-Hsi Wu
* Date: 18/07/2009
* Time: 12:14:19 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * XML parser producing a {@link Patterns} object holding the single site
 * pattern observed at one microsatellite locus.
 *
 * @author Chieh-Hsi Wu
 */
public class MicrosatellitePatternParser extends AbstractXMLObjectParser {

    public static final String MICROSATPATTERN = "microsatellitePattern";
    public static final String MICROSAT_SEQ = "microsatSeq";
    public static final String ID = "id";
    public static final String PRINT_DETAILS = "printDetails";
    public static final String PRINT_PATTERN_CONTENT = "printPatternContent";

    /** @return the XML element name handled by this parser */
    public String getParserName() {
        return MICROSATPATTERN;
    }

    /**
     * Builds a Patterns object with a single pattern: the microsatellite
     * state of each taxon at this locus, parsed from a comma-separated list.
     */
    public Object parseXMLObject(XMLObject xo) throws XMLParseException {
        Taxa taxa = (Taxa) xo.getChild(Taxa.class);
        Microsatellite dataType = (Microsatellite) xo.getChild(Microsatellite.class);

        // Comma-separated repeat lengths, one per taxon, mapped to data-type states
        String[] tokens = ((String) xo.getElementFirstChild(MICROSAT_SEQ)).split(",");
        int[] states = new int[tokens.length];
        for (int i = 0; i < tokens.length; i++) {
            states[i] = dataType.getState(tokens[i]);
        }

        Patterns microsatPat = new Patterns(dataType, taxa);
        microsatPat.addPattern(states);
        microsatPat.setId((String) xo.getAttribute(ID));

        if (xo.getAttribute(PRINT_DETAILS, true)) {
            printDetails(microsatPat);
        }
        if (xo.getAttribute(PRINT_PATTERN_CONTENT, true)) {
            printMicrosatContent(microsatPat);
        }

        return microsatPat;
    }

    /** Logs a short summary of the locus (name, taxa count, state range). */
    public static void printDetails(Patterns microsatPat) {
        Logger.getLogger("dr.evoxml").info(
            " Locus name: "+microsatPat.getId()+
            "\n Number of Taxa: "+microsatPat.getPattern(0).length+
            "\n min: "+((Microsatellite)microsatPat.getDataType()).getMin()+" "+
            "max: "+((Microsatellite)microsatPat.getDataType()).getMax()+
            "\n state count: "+microsatPat.getDataType().getStateCount()+"\n");
    }

    /** Logs the per-taxon states of the locus's single pattern. */
    public static void printMicrosatContent(Patterns microsatPat) {
        Logger.getLogger("dr.evoxml").info(
            " Locus name: "+ microsatPat.getId());
        int[] pat = microsatPat.getPattern(0);
        for (int i = 0; i < pat.length; i++) {
            Logger.getLogger("dr.evoxml").info(" Taxon: "+microsatPat.getTaxon(i)+" "+"state: "+pat[i]);
        }
        Logger.getLogger("dr.evoxml").info("\n");
    }

    public XMLSyntaxRule[] getSyntaxRules() {
        return rules;
    }

    private final XMLSyntaxRule[] rules = new XMLSyntaxRule[]{
        new ElementRule(Taxa.class),
        new ElementRule(Microsatellite.class),
        new ElementRule(MICROSAT_SEQ, new XMLSyntaxRule[]{
            new ElementRule(String.class,
                "A string of numbers representing the microsatellite lengths for a locus",
                "1,2,3,4,5,67,100")}, false),
        new StringAttributeRule(ID, "the name of the locus"),
        AttributeRule.newBooleanRule(PRINT_DETAILS, true),
        AttributeRule.newBooleanRule(PRINT_PATTERN_CONTENT, true)
    };

    public String getParserDescription() {
        return "This element represents a microsatellite pattern.";
    }

    public Class getReturnType() {
        return Patterns.class;
    }
}
|
src/dr/evoxml/MicrosatellitePatternParser.java
|
Parser that returns a Patterns object containing the pattern of one microsatellite locus
git-svn-id: 67bc77c75b8364e4e9cdff0eb6560f5818674cd8@1976 ca793f91-a31e-0410-b540-2769d408b6a1
|
src/dr/evoxml/MicrosatellitePatternParser.java
|
Parser that returns a Patterns object containing the pattern of one microsatellite locus
|
|
Java
|
lgpl-2.1
|
error: pathspec 'src/java/org/jdesktop/swingx/painter/Painters.java' did not match any file(s) known to git
|
90fff38b8d4c4af0094be2f3791cec37e56068c7
| 1
|
RockManJoe64/swingx,RockManJoe64/swingx
|
/*
* $Id$
*
* Copyright 2010 Sun Microsystems, Inc., 4150 Network Circle,
* Santa Clara, California 95054, U.S.A. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package org.jdesktop.swingx.painter;
import java.awt.Graphics2D;
/**
* A collection of static painters. These painters do not store state and are safe to reuse.
* @author kschaefer
*/
/**
 * A collection of static painters. These painters keep no state and are
 * therefore safe to share and reuse.
 *
 * @author kschaefer
 */
public final class Painters {
    /** Utility class: not instantiable. */
    private Painters() {
        //prevent instantiation
    }

    /** A painter that paints nothing at all. */
    public static final Painter<Object> EMPTY_PAINTER = new Painter<Object>() {
        @Override
        public void paint(Graphics2D g, Object object, int width, int height) {
            // intentionally a no-op
        }
    };
}
|
src/java/org/jdesktop/swingx/painter/Painters.java
|
Issues 1025 and 1024: Created a Painters zoo for Painter flyweights and created an EMPTY_PAINTER flyweight implementation.
|
src/java/org/jdesktop/swingx/painter/Painters.java
|
Issues 1025 and 1024: Created a Painters zoo for Painter flyweights and created an EMPTY_PAINTER flyweight implementation.
|
|
Java
|
lgpl-2.1
|
error: pathspec 'src/dr/evomodel/substmodel/CovarionSubstitutionModel.java' did not match any file(s) known to git
|
4186240df14896cafd4806725a217368177fc904
| 1
|
4ment/beast-mcmc,beast-dev/beast-mcmc,maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,codeaudit/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,codeaudit/beast-mcmc,beast-dev/beast-mcmc,maxbiostat/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,codeaudit/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,codeaudit/beast-mcmc,beast-dev/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,codeaudit/beast-mcmc,maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,codeaudit/beast-mcmc
|
/*
* CovarionSubstitutionModel.java
*
* Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.substmodel;
import dr.evolution.datatype.*;
import dr.inference.model.Parameter;
import dr.xml.*;
/**
* @author Helen Shearman
* @author Alexei Drummond
*
* @version $Id$
*/
/**
 * A two-state covarion substitution model for binary data with a hidden rate
 * class: characters switch between a slow mode (rate {@code alpha}) and a fast
 * mode (normalized to 1.0), with mode-switching rate {@code beta} and
 * additional rate {@code gamma}.
 *
 * @author Helen Shearman
 * @author Alexei Drummond
 *
 * @version $Id$
 */
public class CovarionSubstitutionModel extends GeneralSubstitutionModel {

    public static final String COVARION_MODEL = "covarionModel";
    public static final String ALPHA = "alpha";
    public static final String BETA = "beta";
    public static final String GAMMA = "gamma";

    /**
     * constructor
     *
     * @param dataType the data type
     * @param freqModel the frequency model (must share the same data type)
     * @param alphaParameter - the rate of evolution in slow mode
     * @param betaParameter - the rate of flipping between slow and fast modes
     * @param gammaParameter - additional relative rate parameter
     */
    public CovarionSubstitutionModel(TwoStateCovarion dataType, FrequencyModel freqModel,
                                     Parameter alphaParameter,
                                     Parameter betaParameter, Parameter gammaParameter) {

        super(COVARION_MODEL, dataType, freqModel, 5);

        alpha = alphaParameter;
        beta = betaParameter;
        gamma = gammaParameter;
        addParameter(alpha);
        addParameter(beta);
        addParameter(gamma);
    }

    /**
     * Fills the six relative rates of the 4x4 covarion rate matrix; the last
     * entry is fixed at 1.0 (the fast rate), against which alpha is scaled.
     */
    protected void setupRelativeRates() {

        relativeRates[0] = alpha.getParameterValue(0);
        relativeRates[1] = beta.getParameterValue(0);
        relativeRates[2] = 0.0;
        relativeRates[3] = 0.0;
        relativeRates[4] = gamma.getParameterValue(0);
        relativeRates[5] = 1.0;
    }

    /**
     * Parses an element from an DOM document into a CovarionSubstitutionModel
     */
    public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {

        public String getParserName() { return COVARION_MODEL; }

        public Object parseXMLObject(XMLObject xo) throws XMLParseException {

            Parameter alphaParameter;
            Parameter betaParameter;
            Parameter gammaParameter;

            XMLObject cxo = (XMLObject)xo.getChild(FREQUENCIES);
            FrequencyModel freqModel = (FrequencyModel)cxo.getChild(FrequencyModel.class);

            TwoStateCovarion dataType = TwoStateCovarion.INSTANCE; // fancy new datatype courtesy of Helen

            cxo = (XMLObject)xo.getChild(ALPHA);
            alphaParameter = (Parameter)cxo.getChild(Parameter.class);

            // alpha must be positive and less than 1.0 because the fast rate is normalized to 1.0
            alphaParameter.addBounds(new Parameter.DefaultBounds(1.0, 0.0, 1));

            cxo = (XMLObject)xo.getChild(BETA);
            betaParameter = (Parameter)cxo.getChild(Parameter.class);

            cxo = (XMLObject)xo.getChild(GAMMA);
            gammaParameter = (Parameter)cxo.getChild(Parameter.class);

            if (dataType != freqModel.getDataType()) {
                throw new XMLParseException("Data type of " + getParserName() + " element does not match that of its frequencyModel.");
            }

            return new CovarionSubstitutionModel(dataType, freqModel, alphaParameter, betaParameter, gammaParameter);
        }

        //************************************************************************
        // AbstractXMLObjectParser implementation
        //************************************************************************

        public String getParserDescription() {
            // FIX: "langauge" -> "language" in the user-facing description
            return "A covarion substitution model of language evolution with binary data and a hidden rate state with two rates.";
        }

        public Class getReturnType() { return CovarionSubstitutionModel.class; }

        public XMLSyntaxRule[] getSyntaxRules() { return rules; }

        private final XMLSyntaxRule[] rules = new XMLSyntaxRule[] {
            new ElementRule(FREQUENCIES, FrequencyModel.class),
            new ElementRule(ALPHA,
                new XMLSyntaxRule[] {
                    new ElementRule(Parameter.class, true)}
            ),
            new ElementRule(BETA,
                new XMLSyntaxRule[] {
                    new ElementRule(Parameter.class, true)}
            ),
            new ElementRule(GAMMA,
                new XMLSyntaxRule[] {
                    new ElementRule(Parameter.class, true)}
            )
        };
    };

    /** Rate of evolution in the slow mode (bounded to (0, 1) by the parser). */
    private final Parameter alpha;
    /** Rate of switching between slow and fast modes. */
    private final Parameter beta;
    /** Additional relative rate parameter (relativeRates[4]). */
    private final Parameter gamma;
}
|
src/dr/evomodel/substmodel/CovarionSubstitutionModel.java
|
Somehow this got missed out in the last commit...
|
src/dr/evomodel/substmodel/CovarionSubstitutionModel.java
|
Somehow this got missed out in the last commit...
|
|
Java
|
lgpl-2.1
|
error: pathspec 'testsuite/standalone/src/test/java/org/wildfly/core/test/standalone/mgmt/api/core/DeploymentModulesListTestCase.java' did not match any file(s) known to git
|
c51e74ff348d0b8febd74068b5ee53ba5117529a
| 1
|
bstansberry/wildfly-core,yersan/wildfly-core,luck3y/wildfly-core,jfdenise/wildfly-core,darranl/wildfly-core,jfdenise/wildfly-core,jamezp/wildfly-core,ivassile/wildfly-core,ivassile/wildfly-core,soul2zimate/wildfly-core,aloubyansky/wildfly-core,bstansberry/wildfly-core,jfdenise/wildfly-core,yersan/wildfly-core,ivassile/wildfly-core,luck3y/wildfly-core,bstansberry/wildfly-core,aloubyansky/wildfly-core,soul2zimate/wildfly-core,darranl/wildfly-core,soul2zimate/wildfly-core,aloubyansky/wildfly-core,yersan/wildfly-core,luck3y/wildfly-core,darranl/wildfly-core,jamezp/wildfly-core,jamezp/wildfly-core
|
/*
* Copyright 2019 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.core.test.standalone.mgmt.api.core;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.client.ModelControllerClient;
import org.jboss.as.controller.client.Operation;
import org.jboss.as.controller.client.helpers.ClientConstants;
import org.jboss.as.controller.client.helpers.Operations;
import org.jboss.as.controller.operations.common.Util;
import org.jboss.as.test.deployment.trivial.ServiceActivatorDeploymentUtil;
import org.jboss.as.test.shared.PermissionUtils;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.Property;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;
import org.jboss.shrinkwrap.api.spec.JavaArchive;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.core.testrunner.ManagementClient;
import org.wildfly.core.testrunner.WildflyTestRunner;
import javax.inject.Inject;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.PropertyPermission;
import java.util.stream.Collectors;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.CONTENT;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.ENABLED;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.LIST_MODULES;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.RESULT;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.VERBOSE;
import static org.wildfly.common.Assert.assertFalse;
import static org.wildfly.common.Assert.assertTrue;
/**
* List modules which are on deployment’s classpath
* /deployment=application_war_ear_name:list-modules(verbose=false|true)
* @author <a href="mailto:szhantem@redhat.com">Sultan Zhantemirov</a> (c) 2019 Red Hat, inc.
*/
@RunWith(WildflyTestRunner.class)
public class DeploymentModulesListTestCase {

    private JavaArchive archive;

    /** Address node type under which deployments are registered in the management model. */
    private static final String NODE_TYPE = "deployment";
    /** Module excluded via jboss-deployment-structure.xml; must NOT appear in the listing. */
    private static final String EXAMPLE_MODULE_TO_EXCLUDE = "ibm.jdk";
    /** Module added as an explicit user dependency; must appear in the listing. */
    private static final String EXAMPLE_USER_MODULE = "jdk.net";
    private static final String JAR_DEPLOYMENT_NAME_SUFFIX = "-module-test.jar";
    private static final String JAR_DEPLOYMENT_NAME = EXAMPLE_USER_MODULE + JAR_DEPLOYMENT_NAME_SUFFIX;

    @Inject
    private static ManagementClient managementClient;

    private ModelControllerClient client;

    /**
     * Deploys the test archive before each test and fails fast when the
     * deployment itself did not succeed.
     */
    @Before
    public void deploy() throws Exception {
        client = managementClient.getControllerClient();
        archive = createJarArchive();
        ModelNode response = deployArchive(client, archive);
        // check deployment
        if (!Operations.isSuccessfulOutcome(response)) {
            Assert.fail(String.format("Failed to deploy %s: %s", archive, Operations.getFailureDescription(response).asString()));
        }
    }

    @After
    public void undeploy() throws Exception {
        undeployArchive(client);
    }

    @Test
    public void listModulesNonVerbose() throws Exception {
        this.listModules(false);
    }

    @Test
    public void listModulesVerbose() throws Exception {
        this.listModules(true);
    }

    /**
     * Executes the {@code list-modules} operation against the deployed archive
     * and verifies either the standard or the verbose output.
     *
     * @param verbose whether to request the detailed per-module attribute output
     */
    private void listModules(boolean verbose) throws Exception {
        final ModelNode operation = new ModelNode();
        operation.get(OP).set(LIST_MODULES);
        operation.get(OP_ADDR).set(PathAddress.parseCLIStyleAddress("/" + NODE_TYPE + "=" + archive.getName()).toModelNode());
        if (verbose) {
            operation.get(VERBOSE).set(Boolean.TRUE.toString());
        }
        final ModelNode operationResult = client.execute(operation);
        // check whether the operation was successful
        assertTrue(Operations.isSuccessfulOutcome(operationResult));
        // check standard/detailed output
        if (!verbose) {
            // check whether modules are ordered alphabetically
            assertTrue(isOrderedAlphabetically(operationResult));
            // check module presence
            assertTrue(checkModulesListPresence(operationResult, EXAMPLE_USER_MODULE));
            // check module absence
            assertFalse(checkModulesListPresence(operationResult, EXAMPLE_MODULE_TO_EXCLUDE));
            // check system and user dependencies presence
            assertTrue(checkModulesListNonEmptiness(operationResult));
        } else {
            // check other attributes presence only
            assertTrue(checkDetailedOutput(operationResult));
        }
    }

    /**
     * Checks given module presence in the "list-modules" command output.
     *
     * @param operationResult operation object to extract result from
     * @param moduleName name of the module expected to be present
     * @return true if given module is present in any (system, local, user) list of module dependencies
     */
    private boolean checkModulesListPresence(ModelNode operationResult, String moduleName) {
        for (Property dependenciesGroup : operationResult.get(RESULT).asPropertyList()) {
            // anyMatch avoids materializing an intermediate list just to test its size
            boolean present = dependenciesGroup
                    .getValue()
                    .asPropertyList()
                    .stream()
                    .anyMatch(dependency -> dependency.getValue().asString().equalsIgnoreCase(moduleName));
            if (present) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether both system and user dependencies lists are not empty.
     *
     * @param operationResult operation object to extract result from
     * @return true if both system and user dependencies lists are not empty
     */
    private boolean checkModulesListNonEmptiness(ModelNode operationResult) {
        boolean isSystemDependenciesPresent = false;
        boolean isUserDependenciesPresent = false;
        for (Property dependenciesGroup : operationResult.get(RESULT).asPropertyList()) {
            if (dependenciesGroup.getName().equalsIgnoreCase("system-dependencies")) {
                // check system dependencies list non-emptiness
                isSystemDependenciesPresent = !dependenciesGroup.getValue().asPropertyList().isEmpty();
            }
            if (dependenciesGroup.getName().equalsIgnoreCase("user-dependencies")) {
                // check user dependencies list non-emptiness
                isUserDependenciesPresent = !dependenciesGroup.getValue().asPropertyList().isEmpty();
            }
        }
        return isSystemDependenciesPresent && isUserDependenciesPresent;
    }

    /**
     * Checks whether the module output information contains at least one of the
     * "optional", "export" and "import-services" attributes.
     *
     * @param operationResult operation object to extract result from
     * @return true if detailed output is present
     */
    private boolean checkDetailedOutput(ModelNode operationResult) {
        for (Property dependenciesGroup : operationResult.get(RESULT).asPropertyList()) {
            for (ModelNode dependency : dependenciesGroup.getValue().asList()) {
                // Return as soon as any dependency carries a detailed attribute.
                // The previous version kept looping and only the very last
                // dependency's result survived, contradicting the javadoc.
                boolean detailed = dependency
                        .asPropertyList()
                        .stream()
                        .map(Property::getName)
                        .anyMatch(attributeName ->
                                attributeName.equalsIgnoreCase("optional") ||
                                attributeName.equalsIgnoreCase("import-services") ||
                                attributeName.equalsIgnoreCase("export")
                        );
                if (detailed) {
                    return true;
                }
            }
        }
        return false;
    }

    private ModelNode deployArchive(ModelControllerClient client, Archive<?> archive) throws IOException {
        final List<InputStream> streams = new ArrayList<>();
        streams.add(archive.as(ZipExporter.class).exportAsInputStream());
        final ModelNode addOperation = Util.createAddOperation(PathAddress.pathAddress(NODE_TYPE, JAR_DEPLOYMENT_NAME));
        addOperation.get(ENABLED).set(true);
        addOperation.get(CONTENT).add().get(ClientConstants.INPUT_STREAM_INDEX).set(0);
        // last argument 'true': the operation closes the attached streams after execution
        return client.execute(Operation.Factory.create(addOperation, streams, true));
    }

    private void undeployArchive(ModelControllerClient client) throws IOException {
        final ModelNode removeOperation = Util.createRemoveOperation(PathAddress.pathAddress(NODE_TYPE, JAR_DEPLOYMENT_NAME));
        client.execute(Operation.Factory.create(removeOperation));
    }

    /**
     * Builds the test deployment: a trivial service-activator jar with a
     * permissions.xml plus a jboss-deployment-structure.xml that excludes
     * {@link #EXAMPLE_MODULE_TO_EXCLUDE} and adds {@link #EXAMPLE_USER_MODULE}.
     */
    private JavaArchive createJarArchive() throws Exception {
        final Properties properties = new Properties();
        final JavaArchive archive = ServiceActivatorDeploymentUtil
                .createServiceActivatorDeploymentArchive(JAR_DEPLOYMENT_NAME, properties);
        archive.delete("META-INF/permissions.xml");
        archive.addAsManifestResource(PermissionUtils.createPermissionsXmlAsset(
                new PropertyPermission("test.deployment.trivial.prop", "write"),
                new PropertyPermission(JAR_DEPLOYMENT_NAME + "Service", "write"),
                new PropertyPermission("service", "write")
        ), "permissions.xml");
        archive.addAsResource(new StringAsset(prepareJBossDeploymentStructure()),
                "META-INF/jboss-deployment-structure.xml");
        return archive;
    }

    private String prepareJBossDeploymentStructure() {
        return "<jboss-deployment-structure>\n" +
                " <deployment>\n" +
                " <exclusions>\n" +
                " <module name=\"" + EXAMPLE_MODULE_TO_EXCLUDE + "\"/>\n" +
                " </exclusions>\n" +
                " <dependencies>\n" +
                " <module name=\"" + EXAMPLE_USER_MODULE + "\"/>\n" +
                " </dependencies>\n" +
                " </deployment>\n" +
                "</jboss-deployment-structure>\n";
    }

    /**
     * Checks that EVERY dependency group in the result is sorted alphabetically.
     * The previous version overwrote its flag on each iteration, so only the
     * last group's ordering was actually verified.
     */
    private boolean isOrderedAlphabetically(ModelNode operationResult) {
        for (Property dependenciesGroup : operationResult.get(RESULT).asPropertyList()) {
            List<String> dependenciesList = new ArrayList<>();
            for (Property dependency : dependenciesGroup.getValue().asPropertyList()) {
                dependenciesList.add(dependency.getValue().asString());
            }
            if (!isSorted(dependenciesList)) {
                return false;
            }
        }
        return true;
    }

    /** @return true if the list is in non-descending lexicographic order */
    private boolean isSorted(List<String> list) {
        for (int i = 1; i < list.size(); i++) {
            if (list.get(i - 1).compareTo(list.get(i)) > 0) {
                return false;
            }
        }
        return true;
    }
}
|
testsuite/standalone/src/test/java/org/wildfly/core/test/standalone/mgmt/api/core/DeploymentModulesListTestCase.java
|
[WFCORE-4251] Add Test Case
[WFCORE-4251] Remove wildcard import from test case
|
testsuite/standalone/src/test/java/org/wildfly/core/test/standalone/mgmt/api/core/DeploymentModulesListTestCase.java
|
[WFCORE-4251] Add Test Case
|
|
Java
|
unlicense
|
692c8544ac550ba77e1c9b5a5a748226012077e0
| 0
|
HenryLoenwind/EnderIO,Samernieve/EnderIO,eduardog3000/EnderIO,SleepyTrousers/EnderIO,torteropaid/EnderIO,mmelvin0/EnderIO,Quantum64/EnderIO,MatthiasMann/EnderIO,Joccob/EnderIO,mezz/EnderIO,Joccob/EnderIO,Vexatos/EnderIO,D-Inc/EnderIO,Vexatos/EnderIO,MrNuggelz/EnderIO,eduardog3000/EnderIO
|
package crazypants.enderio.conduit.power;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.minecraft.client.renderer.texture.IconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.Icon;
import net.minecraft.world.World;
import net.minecraftforge.common.ForgeDirection;
import buildcraft.api.power.PowerHandler;
import buildcraft.api.power.PowerHandler.PowerReceiver;
import buildcraft.api.power.PowerHandler.Type;
import crazypants.enderio.ModObject;
import crazypants.enderio.conduit.AbstractConduit;
import crazypants.enderio.conduit.AbstractConduitNetwork;
import crazypants.enderio.conduit.ConduitUtil;
import crazypants.enderio.conduit.ConnectionMode;
import crazypants.enderio.conduit.IConduit;
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.RaytraceResult;
import crazypants.enderio.conduit.geom.CollidableComponent;
import crazypants.enderio.power.BasicCapacitor;
import crazypants.enderio.power.ICapacitor;
import crazypants.enderio.power.PowerHandlerUtil;
import crazypants.enderio.power.PowerInterface;
import crazypants.render.BoundingBox;
import crazypants.render.IconUtil;
import crazypants.util.BlockCoord;
import crazypants.vecmath.Vector3d;
/**
 * Power conduit: transports energy between connected blocks. Comes in three
 * tiers (subtypes) backed by increasingly large capacitors. Bridges the
 * BuildCraft {@link PowerHandler} plumbing and a Redstone-Flux-style
 * receiveEnergy/extractEnergy surface via {@link PowerHandlerUtil}.
 */
public class PowerConduit extends AbstractConduit implements IPowerConduit {

  // Texture cache keyed by icon name; populated once by initIcons().
  static final Map<String, Icon> ICONS = new HashMap<String, Icon>();

  // One capacitor per tier (index == subtype).
  // NOTE(review): the meaning of the three constructor arguments is not
  // visible here — presumably receive/storage/extract limits; confirm
  // against BasicCapacitor.
  static final ICapacitor[] CAPACITORS = new BasicCapacitor[] {
      new BasicCapacitor(350, 1500, 128),
      new BasicCapacitor(500, 3000, 512),
      new BasicCapacitor(500, 5000, 2048)
  };

  // Icon-key suffix for each tier: plain, "Enhanced", "Ender".
  static final String[] POSTFIX = new String[] { "", "Enhanced", "Ender" };

  // The item's damage value encodes the conduit tier.
  static ItemStack createItemStackForSubtype(int subtype) {
    ItemStack result = new ItemStack(ModObject.itemPowerConduit.actualId, 1, subtype);
    return result;
  }

  /** Registers all conduit textures (per-tier plain/input/output/core + transmission). */
  public static void initIcons() {
    IconUtil.addIconProvider(new IconUtil.IIconProvider() {

      @Override
      public void registerIcons(IconRegister register) {
        for (String pf : POSTFIX) {
          ICONS.put(ICON_KEY + pf, register.registerIcon(ICON_KEY + pf));
          ICONS.put(ICON_KEY_INPUT + pf, register.registerIcon(ICON_KEY_INPUT + pf));
          ICONS.put(ICON_KEY_OUTPUT + pf, register.registerIcon(ICON_KEY_OUTPUT + pf));
          ICONS.put(ICON_CORE_KEY + pf, register.registerIcon(ICON_CORE_KEY + pf));
        }
        ICONS.put(ICON_TRANSMISSION_KEY, register.registerIcon(ICON_TRANSMISSION_KEY));
      }

      @Override
      public int getTextureType() {
        return 0;
      }

    });
  }

  // Geometry of the conduit core, centred within the block space.
  public static final float WIDTH = 0.075f;
  public static final float HEIGHT = 0.075f;

  public static final Vector3d MIN = new Vector3d(0.5f - WIDTH, 0.5 - HEIGHT, 0.5 - WIDTH);
  public static final Vector3d MAX = new Vector3d(MIN.x + WIDTH, MIN.y + HEIGHT, MIN.z + WIDTH);

  public static final BoundingBox BOUNDS = new BoundingBox(MIN, MAX);

  // Power network this conduit currently belongs to; null when detached.
  protected PowerConduitNetwork network;

  // Main energy buffer used for normal send/receive.
  private PowerHandler powerHandler;

  // Zero-capacity handler handed out for sides that must not accept input;
  // lazily created in getPowerReceiver().
  private PowerHandler noInputPH;

  // Tier index into CAPACITORS/POSTFIX.
  private int subtype;

  // No-arg constructor; state is filled in later via readFromNBT().
  public PowerConduit() {
  }

  public PowerConduit(int meta) {
    this.subtype = meta;
    powerHandler = createPowerHandlerForType();
  }

  /**
   * Handles a tool click on this conduit (server side only): clicking the core
   * or the hit face re-joins adjacent networks, clicking an external connection
   * cycles its connection mode, and clicking a conduit-to-conduit connection
   * severs it on both sides.
   */
  @Override
  public boolean onBlockActivated(EntityPlayer player, RaytraceResult res, List<RaytraceResult> all) {
    if(ConduitUtil.isToolEquipped(player)) {
      if(!getBundle().getEntity().worldObj.isRemote) {
        if(res != null && res.component != null) {
          ForgeDirection connDir = res.component.dir;
          ForgeDirection faceHit = ForgeDirection.getOrientation(res.movingObjectPosition.sideHit);
          if(connDir == ForgeDirection.UNKNOWN || connDir == faceHit) {
            // Attempt to join networks
            BlockCoord loc = getLocation().getLocation(faceHit);
            IPowerConduit neighbour = ConduitUtil.getConduit(getBundle().getEntity().worldObj, loc.x, loc.y, loc.z, IPowerConduit.class);
            if(neighbour != null) {
              if(network != null) {
                // tear down the current network so it can be rebuilt with the neighbour
                network.destroyNetwork();
              }
              onAddedToBundle();
              return true;
            }
          } else if(externalConnections.contains(connDir)) {
            // cycle the connection mode of the clicked external connection
            setConnectionMode(connDir, getNextConnectionMode(connDir));
            return true;
          } else if(containsConduitConnection(connDir)) {
            // sever the conduit-to-conduit connection on both ends
            conduitConnectionRemoved(connDir);
            BlockCoord loc = getLocation().getLocation(connDir);
            IPowerConduit neighbour = ConduitUtil.getConduit(getBundle().getEntity().worldObj, loc.x, loc.y, loc.z, IPowerConduit.class);
            if(neighbour != null) {
              neighbour.conduitConnectionRemoved(connDir.getOpposite());
            }
            if(network != null) {
              network.destroyNetwork();
            }
            return true;
          }
        }
      }
    }
    return false;
  }

  @Override
  public ICapacitor getCapacitor() {
    return CAPACITORS[subtype];
  }

  private PowerHandler createPowerHandlerForType() {
    return PowerHandlerUtil.createHandler(CAPACITORS[subtype], this, Type.PIPE);
  }

  // Persists tier and stored energy alongside the base conduit state.
  @Override
  public void writeToNBT(NBTTagCompound nbtRoot) {
    super.writeToNBT(nbtRoot);
    nbtRoot.setShort("subtype", (short) subtype);
    nbtRoot.setFloat("energyStored", powerHandler.getEnergyStored());
  }

  @Override
  public void readFromNBT(NBTTagCompound nbtRoot) {
    super.readFromNBT(nbtRoot);
    subtype = nbtRoot.getShort("subtype");
    if(powerHandler == null) {
      // instance may have been created via the no-arg constructor
      powerHandler = createPowerHandlerForType();
    }
    // clamp stored energy so it never exceeds the (possibly changed) capacity
    powerHandler.setEnergy(Math.min(powerHandler.getMaxEnergyStored(), nbtRoot.getFloat("energyStored")));
  }

  /**
   * Returns the BuildCraft power receiver for a side. Output-only and disabled
   * sides receive a zero-configured handler so they cannot accept any energy.
   */
  @Override
  public PowerReceiver getPowerReceiver(ForgeDirection side) {
    ConnectionMode mode = getConectionMode(side);
    if(mode == ConnectionMode.OUTPUT || mode == ConnectionMode.DISABLED) {
      if(noInputPH == null) {
        noInputPH = new PowerHandler(this, Type.PIPE);
        noInputPH.configure(0, 0, 0, powerHandler.getMaxEnergyStored());
      }
      return noInputPH.getPowerReceiver();
    }
    return powerHandler.getPowerReceiver();
  }

  // Extraction is blocked on input-only or disabled sides.
  @Override
  public float getMaxEnergyExtracted(ForgeDirection dir) {
    ConnectionMode mode = getConectionMode(dir);
    if(mode == ConnectionMode.INPUT || mode == ConnectionMode.DISABLED) {
      return 0;
    }
    return getCapacitor().getMaxEnergyExtracted();
  }

  // Input is blocked on output-only or disabled sides.
  @Override
  public float getMaxEnergyRecieved(ForgeDirection dir) {
    ConnectionMode mode = getConectionMode(dir);
    if(mode == ConnectionMode.OUTPUT || mode == ConnectionMode.DISABLED) {
      return 0;
    }
    return getCapacitor().getMaxEnergyReceived();
  }

  @Override
  public PowerHandler getPowerHandler() {
    return powerHandler;
  }

  // Intentionally empty: energy loss over time (perdition) is suppressed.
  @Override
  public void applyPerdition() {
  }

  // Intentionally empty. NOTE(review): presumably energy distribution is
  // driven by PowerConduitNetwork rather than per-conduit work — confirm.
  @Override
  public void doWork(PowerHandler workProvider) {
  }

  @Override
  public World getWorld() {
    return getBundle().getEntity().worldObj;
  }

  @Override
  public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) {
    if(getMaxEnergyRecieved(from) == 0) {
      // side is output-only or disabled; accept nothing
      return 0;
    }
    return PowerHandlerUtil.recieveRedstoneFlux(from, powerHandler, maxReceive, simulate);
  }

  // Energy cannot be pulled out of a conduit through this API.
  @Override
  public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) {
    return 0;
  }

  @Override
  public boolean canInterface(ForgeDirection from) {
    return true;
  }

  // x10 scaling between the internal float storage and this int-based API.
  // NOTE(review): presumably an MJ -> RF unit conversion — confirm.
  @Override
  public int getEnergyStored(ForgeDirection from) {
    return (int) (powerHandler.getEnergyStored() * 10);
  }

  @Override
  public int getMaxEnergyStored(ForgeDirection from) {
    return (int) (powerHandler.getMaxEnergyStored() * 10);
  }

  @Override
  public AbstractConduitNetwork<?> getNetwork() {
    return network;
  }

  @Override
  public boolean setNetwork(AbstractConduitNetwork<?> network) {
    this.network = (PowerConduitNetwork) network;
    return true;
  }

  // External connection is allowed only to receptors that accept conduits.
  @Override
  public boolean canConnectToExternal(ForgeDirection direction) {
    PowerInterface rec = getExternalPowerReceptor(direction);
    return rec != null && rec.canConduitConnect(direction);
  }

  // Keep the network's receptor registry in sync when an external block attaches.
  @Override
  public void externalConnectionAdded(ForgeDirection direction) {
    super.externalConnectionAdded(direction);
    if(network != null) {
      TileEntity te = bundle.getEntity();
      network.powerReceptorAdded(this, direction, te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ,
          getExternalPowerReceptor(direction));
    }
  }

  // Keep the network's receptor registry in sync when an external block detaches.
  @Override
  public void externalConnectionRemoved(ForgeDirection direction) {
    super.externalConnectionRemoved(direction);
    if(network != null) {
      TileEntity te = bundle.getEntity();
      network.powerReceptorRemoved(te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ);
    }
  }

  /**
   * Wraps the tile entity adjacent on the given side in a {@link PowerInterface}.
   * Returns null when there is no world/tile, or when the neighbour is another
   * conduit bundle (conduit-to-conduit links are handled separately).
   */
  @Override
  public PowerInterface getExternalPowerReceptor(ForgeDirection direction) {
    TileEntity te = bundle.getEntity();
    World world = te.worldObj;
    if(world == null) {
      return null;
    }
    TileEntity test = world.getBlockTileEntity(te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ);
    if(test == null) {
      return null;
    }
    if(test instanceof IConduitBundle) {
      return null;
    }
    return PowerInterface.create(test);
  }

  @Override
  public ItemStack createItem() {
    return createItemStackForSubtype(subtype);
  }

  @Override
  public Class<? extends IConduit> getBaseConduitType() {
    return IPowerConduit.class;
  }

  // Rendering

  // Core icon for the centre piece, plain tier icon for the arms.
  @Override
  public Icon getTextureForState(CollidableComponent component) {
    if(component.dir == ForgeDirection.UNKNOWN) {
      return ICONS.get(ICON_CORE_KEY + POSTFIX[subtype]);
    }
    return ICONS.get(ICON_KEY + POSTFIX[subtype]);
  }

  @Override
  public Icon getTextureForInputMode() {
    return ICONS.get(ICON_KEY_INPUT + POSTFIX[subtype]);
  }

  @Override
  public Icon getTextureForOutputMode() {
    return ICONS.get(ICON_KEY_OUTPUT + POSTFIX[subtype]);
  }

  // No transmission overlay texture for power conduits.
  @Override
  public Icon getTransmitionTextureForState(CollidableComponent component) {
    return null;
  }
}
|
common/crazypants/enderio/conduit/power/PowerConduit.java
|
package crazypants.enderio.conduit.power;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import cofh.api.energy.IEnergyHandler;
import net.minecraft.client.renderer.texture.IconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.Icon;
import net.minecraft.world.World;
import net.minecraftforge.common.ForgeDirection;
import buildcraft.api.power.IPowerEmitter;
import buildcraft.api.power.IPowerReceptor;
import buildcraft.api.power.PowerHandler;
import buildcraft.api.power.PowerHandler.PowerReceiver;
import buildcraft.api.power.PowerHandler.Type;
import crazypants.enderio.ModObject;
import crazypants.enderio.conduit.AbstractConduit;
import crazypants.enderio.conduit.AbstractConduitNetwork;
import crazypants.enderio.conduit.ConduitUtil;
import crazypants.enderio.conduit.ConnectionMode;
import crazypants.enderio.conduit.IConduit;
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.RaytraceResult;
import crazypants.enderio.conduit.geom.CollidableComponent;
import crazypants.enderio.power.BasicCapacitor;
import crazypants.enderio.power.ICapacitor;
import crazypants.enderio.power.PowerHandlerUtil;
import crazypants.enderio.power.PowerInterface;
import crazypants.render.BoundingBox;
import crazypants.render.IconUtil;
import crazypants.util.BlockCoord;
import crazypants.vecmath.Vector3d;
/**
 * Power conduit implementation. Three tiers (subtypes) with increasingly
 * capable capacitors. Exposes both the BuildCraft {@link PowerHandler}
 * receiver plumbing and a Redstone-Flux-style receiveEnergy/extractEnergy
 * API delegated through {@link PowerHandlerUtil}.
 */
public class PowerConduit extends AbstractConduit implements IPowerConduit {

  // Icon cache keyed by texture name; filled once in initIcons().
  static final Map<String, Icon> ICONS = new HashMap<String, Icon>();

  // Capacitor per tier (index == subtype).
  // NOTE(review): constructor argument semantics are not visible in this file;
  // verify against BasicCapacitor before relying on them.
  static final ICapacitor[] CAPACITORS = new BasicCapacitor[] {
      new BasicCapacitor(250, 1500, 128),
      new BasicCapacitor(350, 3000, 512),
      new BasicCapacitor(500, 5000, 2048)
  };

  // Per-tier suffix appended to icon keys.
  static final String[] POSTFIX = new String[] { "", "Enhanced", "Ender" };

  // Item damage value carries the conduit tier.
  static ItemStack createItemStackForSubtype(int subtype) {
    ItemStack result = new ItemStack(ModObject.itemPowerConduit.actualId, 1, subtype);
    return result;
  }

  /** Registers the plain/input/output/core icons for every tier plus the transmission icon. */
  public static void initIcons() {
    IconUtil.addIconProvider(new IconUtil.IIconProvider() {

      @Override
      public void registerIcons(IconRegister register) {
        for (String pf : POSTFIX) {
          ICONS.put(ICON_KEY + pf, register.registerIcon(ICON_KEY + pf));
          ICONS.put(ICON_KEY_INPUT + pf, register.registerIcon(ICON_KEY_INPUT + pf));
          ICONS.put(ICON_KEY_OUTPUT + pf, register.registerIcon(ICON_KEY_OUTPUT + pf));
          ICONS.put(ICON_CORE_KEY + pf, register.registerIcon(ICON_CORE_KEY + pf));
        }
        ICONS.put(ICON_TRANSMISSION_KEY, register.registerIcon(ICON_TRANSMISSION_KEY));
      }

      @Override
      public int getTextureType() {
        return 0;
      }

    });
  }

  // Conduit core dimensions, centred in the block.
  public static final float WIDTH = 0.075f;
  public static final float HEIGHT = 0.075f;

  public static final Vector3d MIN = new Vector3d(0.5f - WIDTH, 0.5 - HEIGHT, 0.5 - WIDTH);
  public static final Vector3d MAX = new Vector3d(MIN.x + WIDTH, MIN.y + HEIGHT, MIN.z + WIDTH);

  public static final BoundingBox BOUNDS = new BoundingBox(MIN, MAX);

  // Network membership; null while detached.
  protected PowerConduitNetwork network;

  // Primary energy buffer.
  private PowerHandler powerHandler;

  // Lazily-built zero-capacity handler for sides that must reject input.
  private PowerHandler noInputPH;

  // Tier index into CAPACITORS/POSTFIX.
  private int subtype;

  // State restored later through readFromNBT().
  public PowerConduit() {
  }

  public PowerConduit(int meta) {
    this.subtype = meta;
    powerHandler = createPowerHandlerForType();
  }

  /**
   * Tool-click handling (server side only): core/face clicks re-join adjacent
   * networks, external-connection clicks cycle the connection mode, and
   * conduit-connection clicks sever the link on both sides.
   */
  @Override
  public boolean onBlockActivated(EntityPlayer player, RaytraceResult res, List<RaytraceResult> all) {
    if(ConduitUtil.isToolEquipped(player)) {
      if(!getBundle().getEntity().worldObj.isRemote) {
        if(res != null && res.component != null) {
          ForgeDirection connDir = res.component.dir;
          ForgeDirection faceHit = ForgeDirection.getOrientation(res.movingObjectPosition.sideHit);
          if(connDir == ForgeDirection.UNKNOWN || connDir == faceHit) {
            // Attempt to join networks
            BlockCoord loc = getLocation().getLocation(faceHit);
            IPowerConduit neighbour = ConduitUtil.getConduit(getBundle().getEntity().worldObj, loc.x, loc.y, loc.z, IPowerConduit.class);
            if(neighbour != null) {
              if(network != null) {
                // discard the old network so it gets rebuilt with the neighbour
                network.destroyNetwork();
              }
              onAddedToBundle();
              return true;
            }
          } else if(externalConnections.contains(connDir)) {
            // advance the clicked external connection to its next mode
            setConnectionMode(connDir, getNextConnectionMode(connDir));
            return true;
          } else if(containsConduitConnection(connDir)) {
            // break the conduit-to-conduit link from both ends
            conduitConnectionRemoved(connDir);
            BlockCoord loc = getLocation().getLocation(connDir);
            IPowerConduit neighbour = ConduitUtil.getConduit(getBundle().getEntity().worldObj, loc.x, loc.y, loc.z, IPowerConduit.class);
            if(neighbour != null) {
              neighbour.conduitConnectionRemoved(connDir.getOpposite());
            }
            if(network != null) {
              network.destroyNetwork();
            }
            return true;
          }
        }
      }
    }
    return false;
  }

  @Override
  public ICapacitor getCapacitor() {
    return CAPACITORS[subtype];
  }

  private PowerHandler createPowerHandlerForType() {
    return PowerHandlerUtil.createHandler(CAPACITORS[subtype], this, Type.PIPE);
  }

  // Saves tier and stored energy on top of the base conduit state.
  @Override
  public void writeToNBT(NBTTagCompound nbtRoot) {
    super.writeToNBT(nbtRoot);
    nbtRoot.setShort("subtype", (short) subtype);
    nbtRoot.setFloat("energyStored", powerHandler.getEnergyStored());
  }

  @Override
  public void readFromNBT(NBTTagCompound nbtRoot) {
    super.readFromNBT(nbtRoot);
    subtype = nbtRoot.getShort("subtype");
    if(powerHandler == null) {
      // created through the no-arg constructor; build the buffer now
      powerHandler = createPowerHandlerForType();
    }
    // never restore more energy than the current capacity allows
    powerHandler.setEnergy(Math.min(powerHandler.getMaxEnergyStored(), nbtRoot.getFloat("energyStored")));
  }

  /**
   * BuildCraft receiver for a side; output-only and disabled sides get a
   * zero-configured handler so no energy can be pushed in.
   */
  @Override
  public PowerReceiver getPowerReceiver(ForgeDirection side) {
    ConnectionMode mode = getConectionMode(side);
    if(mode == ConnectionMode.OUTPUT || mode == ConnectionMode.DISABLED) {
      if(noInputPH == null) {
        noInputPH = new PowerHandler(this, Type.PIPE);
        noInputPH.configure(0, 0, 0, powerHandler.getMaxEnergyStored());
      }
      return noInputPH.getPowerReceiver();
    }
    return powerHandler.getPowerReceiver();
  }

  // No extraction through input-only or disabled sides.
  @Override
  public float getMaxEnergyExtracted(ForgeDirection dir) {
    ConnectionMode mode = getConectionMode(dir);
    if(mode == ConnectionMode.INPUT || mode == ConnectionMode.DISABLED) {
      return 0;
    }
    return getCapacitor().getMaxEnergyExtracted();
  }

  // No input through output-only or disabled sides.
  @Override
  public float getMaxEnergyRecieved(ForgeDirection dir) {
    ConnectionMode mode = getConectionMode(dir);
    if(mode == ConnectionMode.OUTPUT || mode == ConnectionMode.DISABLED) {
      return 0;
    }
    return getCapacitor().getMaxEnergyReceived();
  }

  @Override
  public PowerHandler getPowerHandler() {
    return powerHandler;
  }

  // Intentionally empty: perdition (passive energy loss) is disabled.
  @Override
  public void applyPerdition() {
  }

  // Intentionally empty. NOTE(review): distribution appears to happen at the
  // network level rather than per conduit — confirm.
  @Override
  public void doWork(PowerHandler workProvider) {
  }

  @Override
  public World getWorld() {
    return getBundle().getEntity().worldObj;
  }

  @Override
  public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) {
    if(getMaxEnergyRecieved(from) == 0) {
      // the side rejects input entirely
      return 0;
    }
    return PowerHandlerUtil.recieveRedstoneFlux(from, powerHandler, maxReceive, simulate);
  }

  // Direct extraction from a conduit is not supported.
  @Override
  public int extractEnergy(ForgeDirection from, int maxExtract, boolean simulate) {
    return 0;
  }

  @Override
  public boolean canInterface(ForgeDirection from) {
    return true;
  }

  // Internal float storage is reported x10 through this int-based API.
  // NOTE(review): looks like an MJ -> RF unit conversion — confirm.
  @Override
  public int getEnergyStored(ForgeDirection from) {
    return (int)(powerHandler.getEnergyStored() * 10);
  }

  @Override
  public int getMaxEnergyStored(ForgeDirection from) {
    return (int)(powerHandler.getMaxEnergyStored() * 10);
  }

  @Override
  public AbstractConduitNetwork<?> getNetwork() {
    return network;
  }

  @Override
  public boolean setNetwork(AbstractConduitNetwork<?> network) {
    this.network = (PowerConduitNetwork) network;
    return true;
  }

  // Connect externally only to receptors that permit conduit connections.
  @Override
  public boolean canConnectToExternal(ForgeDirection direction) {
    PowerInterface rec = getExternalPowerReceptor(direction);
    return rec != null && rec.canConduitConnect(direction);
  }

  // Register the newly attached external receptor with the network.
  @Override
  public void externalConnectionAdded(ForgeDirection direction) {
    super.externalConnectionAdded(direction);
    if(network != null) {
      TileEntity te = bundle.getEntity();
      network.powerReceptorAdded(this, direction, te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ,
          getExternalPowerReceptor(direction));
    }
  }

  // Unregister the detached external receptor from the network.
  @Override
  public void externalConnectionRemoved(ForgeDirection direction) {
    super.externalConnectionRemoved(direction);
    if(network != null) {
      TileEntity te = bundle.getEntity();
      network.powerReceptorRemoved(te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ);
    }
  }

  /**
   * Wraps the adjacent tile entity on the given side in a {@link PowerInterface}.
   * Returns null when the world or tile is missing, or when the neighbour is
   * another conduit bundle (handled by conduit-to-conduit connections instead).
   */
  @Override
  public PowerInterface getExternalPowerReceptor(ForgeDirection direction) {
    TileEntity te = bundle.getEntity();
    World world = te.worldObj;
    if(world == null) {
      return null;
    }
    TileEntity test = world.getBlockTileEntity(te.xCoord + direction.offsetX, te.yCoord + direction.offsetY, te.zCoord + direction.offsetZ);
    if(test == null) {
      return null;
    }
    if(test instanceof IConduitBundle) {
      return null;
    }
    return PowerInterface.create(test);
  }

  @Override
  public ItemStack createItem() {
    return createItemStackForSubtype(subtype);
  }

  @Override
  public Class<? extends IConduit> getBaseConduitType() {
    return IPowerConduit.class;
  }

  // Rendering

  // Core icon for the centre component, tier icon for the arms.
  @Override
  public Icon getTextureForState(CollidableComponent component) {
    if(component.dir == ForgeDirection.UNKNOWN) {
      return ICONS.get(ICON_CORE_KEY + POSTFIX[subtype]);
    }
    return ICONS.get(ICON_KEY + POSTFIX[subtype]);
  }

  @Override
  public Icon getTextureForInputMode() {
    return ICONS.get(ICON_KEY_INPUT + POSTFIX[subtype]);
  }

  @Override
  public Icon getTextureForOutputMode() {
    return ICONS.get(ICON_KEY_OUTPUT + POSTFIX[subtype]);
  }

  // Power conduits render no transmission overlay.
  @Override
  public Icon getTransmitionTextureForState(CollidableComponent component) {
    return null;
  }
}
|
#290 Energy conduit max input is too low for BC combustion engines
|
common/crazypants/enderio/conduit/power/PowerConduit.java
|
#290 Energy conduit max input is too low for BC combustion engines
|
|
Java
|
apache-2.0
|
f2be652207d04acf555fba1832f56f8425185d95
| 0
|
TarantulaTechnology/JGroups,slaskawi/JGroups,deepnarsay/JGroups,rhusar/JGroups,rhusar/JGroups,pruivo/JGroups,ligzy/JGroups,kedzie/JGroups,rpelisse/JGroups,Sanne/JGroups,pruivo/JGroups,rpelisse/JGroups,ligzy/JGroups,rvansa/JGroups,Sanne/JGroups,deepnarsay/JGroups,belaban/JGroups,pruivo/JGroups,ibrahimshbat/JGroups,vjuranek/JGroups,slaskawi/JGroups,danberindei/JGroups,danberindei/JGroups,belaban/JGroups,belaban/JGroups,ibrahimshbat/JGroups,dimbleby/JGroups,slaskawi/JGroups,vjuranek/JGroups,ligzy/JGroups,rhusar/JGroups,pferraro/JGroups,dimbleby/JGroups,vjuranek/JGroups,deepnarsay/JGroups,dimbleby/JGroups,ibrahimshbat/JGroups,Sanne/JGroups,tristantarrant/JGroups,TarantulaTechnology/JGroups,danberindei/JGroups,pferraro/JGroups,pferraro/JGroups,rpelisse/JGroups,TarantulaTechnology/JGroups,tristantarrant/JGroups,ibrahimshbat/JGroups,kedzie/JGroups,kedzie/JGroups,rvansa/JGroups
|
package org.jgroups;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.conf.ConfiguratorFactory;
import org.jgroups.conf.ProtocolStackConfigurator;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.stack.StateTransferInfo;
import org.jgroups.util.Promise;
import org.jgroups.util.Queue;
import org.jgroups.util.QueueClosedException;
import org.jgroups.util.Util;
import org.w3c.dom.Element;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Exchanger;
/**
* JChannel is a pure Java implementation of Channel.
* When a JChannel object is instantiated it automatically sets up the
* protocol stack.
* <p>
* <B>Properties</B>
* <P>
* Properties are used to configure a channel, and are accepted in
* several forms; the String form is described here.
* A property string consists of a number of properties separated by
* colons. For example:
* <p>
* <pre>"<prop1>(arg1=val1):<prop2>(arg1=val1;arg2=val2):<prop3>:<propn>"</pre>
* <p>
* Each property relates directly to a protocol layer, which is
* implemented as a Java class. When a protocol stack is to be created
* based on the above property string, the first property becomes the
* bottom-most layer, the second one will be placed on the first, etc.:
* the stack is created from the bottom to the top, as the string is
* parsed from left to right. Each property has to be the name of a
* Java class that resides in the
* {@link org.jgroups.protocols} package.
* <p>
* Note that only the base name has to be given, not the fully specified
* class name (e.g., UDP instead of org.jgroups.protocols.UDP).
* <p>
* Each layer may have 0 or more arguments, which are specified as a
* list of name/value pairs in parentheses directly after the property.
* In the example above, the first protocol layer has 1 argument,
* the second 2, the third none. When a layer is created, these
* properties (if there are any) will be set in a layer by invoking
* the layer's setProperties() method
* <p>
* As an example the property string below instructs JGroups to create
* a JChannel with protocols UDP, PING, FD and GMS:<p>
* <pre>"UDP(mcast_addr=228.10.9.8;mcast_port=5678):PING:FD:GMS"</pre>
* <p>
* The UDP protocol layer is at the bottom of the stack, and it
* should use mcast address 228.10.9.8. and port 5678 rather than
* the default IP multicast address and port. The only other argument
* instructs FD to output debug information while executing.
* Property UDP refers to a class {@link org.jgroups.protocols.UDP},
* which is subsequently loaded and an instance of which is created as protocol layer.
* If any of these classes are not found, an exception will be thrown and
* the construction of the stack will be aborted.
*
* @author Bela Ban
* @version $Id: JChannel.java,v 1.169 2008/02/04 07:47:40 belaban Exp $
*/
public class JChannel extends Channel {
/**
 * The default protocol stack used by the default constructor.
 */
public static final String DEFAULT_PROTOCOL_STACK="udp.xml";

/** System property name used to force an alternative stack configuration. */
static final String FORCE_PROPS="force.properties";

/* the protocol stack configuration string */
private String props=null;

/* the address of this JChannel instance */
private Address local_addr=null;

/* the channel (also known as group) name */
private String cluster_name=null; // group name

/* the latest view of the group membership */
private View my_view=null;

/* the queue that is used to receive messages (events) from the protocol stack */
private final Queue mq=new Queue();

/* the protocol stack, used to send and receive messages from the protocol stack */
private ProtocolStack prot_stack=null;

/** Thread responsible for closing a channel and potentially reconnecting to it (e.g., when shunned). */
protected CloserThread closer=null;

/** To wait until a local address has been assigned */
private final Promise<Address> local_addr_promise=new Promise<Address>();

/** Completed when a state transfer finishes (Boolean result: success/failure). */
private final Promise<Boolean> state_promise=new Promise<Boolean>();

/** Rendezvous between the up() thread delivering GET_APPLSTATE and the application's returnState(). */
private final Exchanger<StateTransferInfo> applstate_exchanger=new Exchanger<StateTransferInfo>();

/** Completed when an UNBLOCK event arrives after a flush (used by connect()). */
private final Promise<Boolean> flush_unblock_promise=new Promise<Boolean>();

/** wait until we have a non-null local_addr (ms) */
private long LOCAL_ADDR_TIMEOUT=30000; //=Long.parseLong(System.getProperty("local_addr.timeout", "30000"));

/* if the state is fetched automatically, this is the default timeout, 5 secs */
private static final long GET_STATE_DEFAULT_TIMEOUT=5000;

/* if FLUSH is used the channel waits for an UNBLOCK event; this is the default timeout, 5 secs */
private static final long FLUSH_UNBLOCK_TIMEOUT=5000;

/* flag to indicate whether to receive blocks; if this is set to true, receive_views is set to true */
private boolean receive_blocks=false;

/* flag to indicate whether to receive local messages;
 * if this is set to false, the JChannel will not receive messages sent by itself */
private boolean receive_local_msgs=true;

/* flag to indicate whether the channel will reconnect (reopen) when the exit message is received */
private boolean auto_reconnect=true;

/* flag to indicate whether the state is supposed to be retrieved after the channel is reconnected;
 * setting this to true automatically forces auto_reconnect to true */
private boolean auto_getstate=true;

/* channel connected flag */
protected volatile boolean connected=false;

/* channel closed flag */
protected volatile boolean closed=false; // close() has been called, channel is unusable

/** True if a state transfer protocol is available, false otherwise */
private boolean state_transfer_supported=false; // set by CONFIG event from STATE_TRANSFER protocol

/** True if a flush protocol is available, false otherwise */
private volatile boolean flush_supported=false; // set by CONFIG event from FLUSH protocol

/** Provides storage for arbitrary objects. Protocols can send up CONFIG events, and all key-value pairs of
 * a CONFIG event will be added to additional_data. On reconnect, a CONFIG event will be sent down by the channel,
 * containing all key-value pairs of additional_data
 */
protected final Map<String,Object> additional_data=new HashMap<String,Object>();

/** Arbitrary key-value pairs received via INFO events from the stack. */
protected final ConcurrentMap<String,Object> info=new ConcurrentHashMap<String,Object>();

protected final Log log=LogFactory.getLog(getClass());

/** Collect statistics */
protected boolean stats=true;

/** Message/byte counters, maintained in send() and up() when stats is enabled. */
protected long sent_msgs=0, received_msgs=0, sent_bytes=0, received_bytes=0;
/** Used by subclass to create a JChannel without a protocol stack, don't use as application programmer */
protected JChannel(boolean no_op) {
;
}
/**
 * Creates a channel configured with the default protocol stack,
 * {@link #DEFAULT_PROTOCOL_STACK}.
 *
 * @throws ChannelException if the protocol stack cannot be initialized.
 */
public JChannel() throws ChannelException {
    this(JChannel.DEFAULT_PROTOCOL_STACK);
}
/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * configuration read from the specified file.
 *
 * @param properties a file containing a JGroups XML protocol stack
 *                   configuration.
 *
 * @throws ChannelException if problems occur during the configuration or
 *                          initialization of the protocol stack.
 */
public JChannel(File properties) throws ChannelException {
    this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * configuration contained in the specified DOM element.
 *
 * @param properties an XML element containing a JGroups XML protocol stack
 *                   configuration.
 *
 * @throws ChannelException if problems occur during the configuration or
 *                          initialization of the protocol stack.
 */
public JChannel(Element properties) throws ChannelException {
    this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * configuration fetched from the specified URL.
 *
 * @param properties a URL pointing to a JGroups XML protocol stack
 *                   configuration.
 *
 * @throws ChannelException if problems occur during the configuration or
 *                          initialization of the protocol stack.
 */
public JChannel(URL properties) throws ChannelException {
    this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * configuration derived from the specified properties parameter.
 *
 * @param properties an old style property string, a string representing a
 *                   system resource containing a JGroups XML configuration,
 *                   a string representing a URL pointing to a JGroups XML
 *                   configuration, or a string representing a file name
 *                   that contains a JGroups XML configuration.
 *                   (ConfiguratorFactory decides which interpretation applies.)
 *
 * @throws ChannelException if problems occur during the configuration and
 *                          initialization of the protocol stack.
 */
public JChannel(String properties) throws ChannelException {
    this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * configuration contained in the protocol stack configurator parameter.
 * <p>
 * All of the public constructors of this class eventually delegate to this
 * method.
 *
 * @param configurator a protocol stack configurator containing a JGroups
 *                     protocol stack configuration.
 *
 * @throws ChannelException if problems occur during the initialization of
 *                          the protocol stack.
 */
public JChannel(ProtocolStackConfigurator configurator) throws ChannelException {
    init(configurator);
}
/**
 * Creates a new JChannel with the protocol stack defined by the given
 * properties object, which may be a property String, a {@code java.net.URL},
 * a URL spec String, or an {@code org.w3c.dom.Element} — the
 * ConfiguratorFactory determines the interpretation. A {@code null} argument
 * selects the default protocol stack.
 *
 * @param properties the protocol stack setup, or null for the default stack.
 * @throws ChannelException if the configuration cannot be loaded or the
 *                          stack cannot be initialized.
 * @deprecated Use the constructors with specific parameter types instead.
 */
public JChannel(Object properties) throws ChannelException {
    Object config=(properties != null)? properties : DEFAULT_PROTOCOL_STACK;
    ProtocolStackConfigurator configurator;
    try {
        configurator=ConfiguratorFactory.getStackConfigurator(config);
    }
    catch(Exception cause) {
        // wrap any configurator failure in the channel's own exception type
        throw new ChannelException("unable to load protocol stack", cause);
    }
    init(configurator);
}
/**
 * Returns the protocol stack.
 * Currently used by Debugger.
 * Specific to JChannel, therefore
 * not visible in Channel.
 */
public ProtocolStack getProtocolStack() {
    return prot_stack;
}

/** Returns this channel's logger (for use by subclasses). */
protected Log getLog() {
    return log;
}

/**
 * Returns the protocol stack configuration in string format.
 * An example of this property is<BR>
 * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"
 */
public String getProperties() {
    return props;
}

/** Returns true if statistics collection is enabled. */
public boolean statsEnabled() {
    return stats;
}

/** Enables or disables statistics collection. */
public void enableStats(boolean stats) {
    this.stats=stats;
}

/** Resets all message/byte counters to zero. */
public void resetStats() {
    sent_msgs=received_msgs=sent_bytes=received_bytes=0;
}

public long getSentMessages() {return sent_msgs;}
public long getSentBytes() {return sent_bytes;}
public long getReceivedMessages() {return received_msgs;}
public long getReceivedBytes() {return received_bytes;}

/** Returns the number of tasks in the stack's timer, or -1 if no stack is present. */
public int getNumberOfTasksInTimer() {return prot_stack != null ? prot_stack.timer.size() : -1;}

/** Returns the number of timer threads in the stack, or -1 if no stack is present. */
public int getTimerThreads() {
    return prot_stack != null? prot_stack.getTimerThreads() : -1;
}
/**
 * Returns a textual dump of the stack's timer queue.
 *
 * @return the timer queue dump, or the placeholder {@code "<n/a>"} when no
 *         protocol stack is present. (The placeholder previously read
 *         {@code "<n/a"} — missing closing bracket.)
 */
public String dumpTimerQueue() {
    return prot_stack != null? prot_stack.dumpTimerQueue() : "<n/a>";
}
/**
 * Returns a pretty-printed form of all the protocols in the stack.
 *
 * @param include_properties when true, each protocol's properties are
 *                           printed as well.
 * @return the formatted protocol spec, or null if no stack is present.
 */
public String printProtocolSpec(boolean include_properties) {
    if(prot_stack == null)
        return null;
    return prot_stack.printProtocolSpec(include_properties);
}
/**
 * Connects the channel to a group.
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 * This method starts the protocol stack by calling ProtocolStack.start,
 * then it sends an Event.CONNECT event down the stack and waits for the return value.
 * Once the call returns, the channel listeners are notified and the channel is considered connected.
 *
 * @param cluster_name A <code>String</code> denoting the group name. Cannot be null.
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 */
public synchronized void connect(String cluster_name) throws ChannelException {
    startStack(cluster_name);
    // only connect if we are not a unicast channel
    if(cluster_name != null) {
        // arm the UNBLOCK promise before connecting so the event cannot be missed
        if(flush_supported)
            flush_unblock_promise.reset();
        Event connect_event=new Event(Event.CONNECT, cluster_name);
        Object res=downcall(connect_event); // waits forever until connected (or channel is closed)
        if(res != null && res instanceof Exception) { // the JOIN was rejected by the coordinator
            // tear the stack back down and reset state before reporting the failure
            stopStack(true, false);
            init();
            throw new ChannelException("connect() failed", (Throwable)res);
        }
        // if FLUSH is used do not return from connect() until UNBLOCK event is received
        if(flush_supported) {
            try {
                flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
            }
            catch (TimeoutException timeout) {
                // a missed UNBLOCK is only logged; the connect itself succeeded
                if(log.isWarnEnabled())
                    log.warn(local_addr + " waiting on UNBLOCK after connect() timed out");
            }
        }
    }
    connected=true;
    notifyChannelConnected(this);
}
/**
 * Connects this channel to a group and gets a state from a specified state
 * provider.
 * <p>
 * This method essentially invokes
 * <code>connect</code> and <code>getState</code> methods successively.
 * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and
 * fetching state rather than two flushes if we invoke <code>connect</code> and <code>getState</code> in succession.
 *
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 *
 * @param cluster_name the cluster name to connect to. Cannot be null.
 * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator.
 * @param state_id the substate id for partial state transfer. If null entire state will be transferred.
 * @param timeout the timeout for state transfer.
 *
 * @exception ChannelException The protocol stack cannot be started
 * @exception ChannelException Connecting to cluster was not successful
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 * @exception StateTransferException State transfer was not successful
 */
public synchronized void connect(String cluster_name,
                                 Address target,
                                 String state_id,
                                 long timeout) throws ChannelException {
    startStack(cluster_name);
    boolean stateTransferOk=false;
    boolean joinSuccessful=false;
    boolean canFetchState=false;
    // only connect if we are not a unicast channel
    if(cluster_name != null) {
        try {
            // CONNECT_WITH_STATE_TRANSFER tells the stack to keep the flush
            // open so that join + state transfer share a single flush phase
            Event connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER, cluster_name);
            Object res=downcall(connect_event); // waits forever until
            // connected (or channel is
            // closed)
            joinSuccessful=!(res != null && res instanceof Exception);
            if(!joinSuccessful) {
                stopStack(true, false);
                init();
                throw new ChannelException("connect() failed", (Throwable)res);
            }
            connected=true;
            notifyChannelConnected(this);
            canFetchState=getView() != null && getView().size() > 1;
            // if I am not the only member in cluster then
            if(canFetchState) {
                try {
                    // fetch state from target; false = do not start a second flush
                    stateTransferOk=getState(target, state_id, timeout, false);
                    if(!stateTransferOk) {
                        throw new StateTransferException(getLocalAddress() + " could not fetch state "
                                + state_id
                                + " from "
                                + target);
                    }
                }
                catch(Exception e) {
                    // NOTE(review): this also re-wraps the StateTransferException
                    // thrown just above in a second StateTransferException
                    throw new StateTransferException(getLocalAddress() + " could not fetch state "
                            + state_id
                            + " from "
                            + target, e);
                }
            }
        }
        finally {
            // close the flush that was left open for the state transfer
            if(flush_supported && canFetchState)
                stopFlush();
        }
    }
}
/**
 * Disconnects the channel if it is connected. If the channel is closed,
 * this operation is ignored.<BR>
 * Otherwise the following actions happen in the listed order<BR>
 * <ol>
 * <li> The JChannel sends a DISCONNECT event down the protocol stack<BR>
 * <li> Blocks until the event has returned<BR>
 * <li> Sends a STOP_QUEING event down the stack<BR>
 * <li> Stops the protocol stack by calling ProtocolStack.stop()<BR>
 * <li> Notifies the listener, if the listener is available<BR>
 * </ol>
 */
public synchronized void disconnect() {
    if(closed) return;
    if(connected) {
        if(cluster_name != null) {
            // Send down a DISCONNECT event, which travels down to the GMS, where a response is returned
            Event disconnect_event=new Event(Event.DISCONNECT, local_addr);
            down(disconnect_event); // DISCONNECT is handled by each layer
        }
        connected=false;
        stopStack(true, false);
        notifyChannelDisconnected(this);
        init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining
    }
}
/**
 * Destroys the channel.
 * After this method has been called, the channel is unusable.<BR>
 * This operation will disconnect the channel and close the channel receive queue immediately.<BR>
 */
public synchronized void close() {
    _close(true, true); // by default disconnect before closing channel and close mq
}
/** Shuts down the channel without disconnecting: a SHUTDOWN event is sent
 *  down the stack first, then the channel is closed without the DISCONNECT
 *  handshake. */
public synchronized void shutdown() {
    down(new Event(Event.SHUTDOWN));
    _close(false, true); // by default disconnect before closing channel and close mq
}
/**
 * Opens the channel. Note that the channel is only open, but <em>not connected</em>.
 * This does the following actions:
 * <ol>
 * <li> Resets the receiver queue by calling Queue.reset
 * <li> Sets up the protocol stack by calling ProtocolStack.setup
 * <li> Sets the closed flag to false
 * </ol>
 *
 * @throws ChannelException if the channel is not closed, or if the new
 *                          protocol stack cannot be created/set up.
 */
public synchronized void open() throws ChannelException {
    if(!closed)
        throw new ChannelException("channel is already open");
    try {
        mq.reset();
        // new stack is created on open() - bela June 12 2003
        prot_stack=new ProtocolStack(this, props);
        prot_stack.setup();
        closed=false;
    }
    catch(Exception e) {
        throw new ChannelException("failed to open channel" , e);
    }
}
/**
 * Returns true if the Open operation has been called successfully
 * (i.e. the channel has not been closed).
 */
public boolean isOpen() {
    return !closed;
}

/**
 * Returns true if the Connect operation has been called successfully.
 */
public boolean isConnected() {
    return connected;
}
/**
 * Returns the number of messages (events) currently waiting in the receive
 * queue.
 *
 * @return the receive queue size. {@code mq} is a {@code final} field
 *         initialized inline, so it can never be null; the previous
 *         {@code mq != null ? ... : -1} guard was dead code.
 */
public int getNumMessages() {
    return mq.size();
}
/** Returns a textual dump of the receive queue contents (delegates to Util). */
public String dumpQueue() {
    return Util.dumpQueue(mq);
}
/**
 * Returns a map of statistics of the various protocols and of the channel itself.
 * @return Map<String,Map>. A map where the keys are the protocols ("channel" pseudo key is
 * used for the channel itself) and the values are property maps.
 */
public Map dumpStats() {
    Map retval=prot_stack.dumpStats();
    if(retval != null) {
        // append the channel's own counters under the "channel" pseudo-protocol key
        Map tmp=dumpChannelStats();
        if(tmp != null)
            retval.put("channel", tmp);
    }
    return retval;
}
/**
 * Collects the channel-level counters (messages/bytes sent and received)
 * into a property map for {@link #dumpStats()}.
 *
 * @return a map from counter name to its current value.
 */
private Map dumpChannelStats() {
    // generic local map + Long.valueOf (cached boxing) instead of the
    // raw HashMap and deprecated new Long(...) allocations
    Map<String,Long> retval=new HashMap<String,Long>();
    retval.put("sent_msgs", Long.valueOf(sent_msgs));
    retval.put("sent_bytes", Long.valueOf(sent_bytes));
    retval.put("received_msgs", Long.valueOf(received_msgs));
    retval.put("received_bytes", Long.valueOf(received_bytes));
    return retval;
}
/**
 * Sends a message through the protocol stack.
 * Implements the Transport interface.
 *
 * @param msg the message to be sent through the protocol stack,
 *            the destination of the message is specified inside the message itself
 * @exception ChannelNotConnectedException
 * @exception ChannelClosedException
 * @exception NullPointerException if msg is null
 */
public void send(Message msg) throws ChannelNotConnectedException, ChannelClosedException {
    checkClosedOrNotConnected();
    if(msg == null)
        throw new NullPointerException("msg is null");
    if(stats) {
        sent_msgs++;
        sent_bytes+=msg.getLength();
    }
    down(new Event(Event.MSG, msg));
}
/**
 * Convenience overload: wraps the given object in a new {@link Message}
 * with the supplied destination and source addresses, then sends it.
 *
 * @param dst the destination address of the message, null for all members
 * @param src the source address of the message
 * @param obj the value of the message
 * @exception ChannelNotConnectedException
 * @exception ChannelClosedException
 * @see JChannel#send
 */
public void send(Address dst, Address src, Serializable obj) throws ChannelNotConnectedException, ChannelClosedException {
    Message message=new Message(dst, src, obj);
    send(message);
}
/**
 * Blocking receive method.
 * This method returns the object that was first received by this JChannel and that has not been
 * received before. After the object is received, it is removed from the receive queue.<BR>
 * If you only want to inspect the object received without removing it from the queue call
 * JChannel.peek<BR>
 * If no messages are in the receive queue, this method blocks until a message is added or the operation times out<BR>
 * By specifying a timeout of 0, the operation blocks forever, or until a message has been received.
 * @param timeout the number of milliseconds to wait if the receive queue is empty. 0 means wait forever
 * @exception TimeoutException if a timeout occurred before a new message was received
 * @exception ChannelNotConnectedException
 * @exception ChannelClosedException
 * @see JChannel#peek
 * @deprecated Use a {@link Receiver} instead
 */
public Object receive(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
    checkClosedOrNotConnected();
    try {
        // timeout <= 0 blocks indefinitely; otherwise wait up to 'timeout' ms
        Event evt=(timeout <= 0)? (Event)mq.remove() : (Event)mq.remove(timeout);
        Object retval=getEvent(evt);
        evt=null;
        return retval;
    }
    catch(QueueClosedException queue_closed) {
        // queue closed means the channel was closed concurrently
        throw new ChannelClosedException();
    }
    catch(TimeoutException t) {
        throw t;
    }
    catch(Exception e) {
        // any other failure is logged and reported as "no message"
        if(log.isErrorEnabled()) log.error("exception: " + e);
        return null;
    }
}
/**
 * Just peeks at the next message, view or block. Does <em>not</em> install
 * new view if view is received.<BR>
 * Does the same thing as JChannel.receive but doesn't remove the object from the
 * receiver queue.
 *
 * @param timeout the number of milliseconds to wait if the queue is empty; 0 blocks forever
 * @return the next queued object, or null on timeout/failure (unlike receive(),
 *         a timeout here is swallowed rather than thrown)
 */
public Object peek(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
    checkClosedOrNotConnected();
    try {
        Event evt=(timeout <= 0)? (Event)mq.peek() : (Event)mq.peek(timeout);
        Object retval=getEvent(evt);
        evt=null;
        return retval;
    }
    catch(QueueClosedException queue_closed) {
        // note: unlike receive(), a closed queue is logged and yields null
        if(log.isErrorEnabled()) log.error("exception: " + queue_closed);
        return null;
    }
    catch(TimeoutException t) {
        return null;
    }
    catch(Exception e) {
        if(log.isErrorEnabled()) log.error("exception: " + e);
        return null;
    }
}
/**
 * Returns the current group view.
 * <BR>
 * If the channel is not connected or if it is closed, null is returned.
 * <BR>
 * @return the current group view, or null if the channel is closed or disconnected
 */
public View getView() {
    if(closed || !connected)
        return null;
    return my_view;
}
/**
 * Returns the local address of the channel, or null if the channel
 * has been closed.
 */
public Address getLocalAddress() {
    if(closed)
        return null;
    return local_addr;
}
/**
 * Returns the name of the channel (cluster), or null if the channel is
 * closed or not connected.
 * @deprecated Use {@link #getClusterName()} instead
 */
public String getChannelName() {
    if(closed || !connected)
        return null;
    return cluster_name;
}
/** Returns the cluster name, or null if the channel is closed or not connected. */
public String getClusterName() {
    if(closed || !connected)
        return null;
    return cluster_name;
}
/**
 * Sets a channel option. The options can be one of the following:
 * <UL>
 * <LI> Channel.BLOCK
 * <LI> Channel.LOCAL
 * <LI> Channel.AUTO_RECONNECT
 * <LI> Channel.AUTO_GETSTATE
 * </UL>
 * <P>
 * There are certain dependencies between the options that you can set,
 * described here:
 * <P>
 * Option: Channel.BLOCK<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true will set setOpt(VIEW, true) and the JChannel will receive BLOCKS and VIEW events<BR>
 *<BR>
 * Option: LOCAL<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true the JChannel will receive messages that it itself sent out.<BR>
 *<BR>
 * Option: AUTO_RECONNECT<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true and the JChannel will try to reconnect when it is being closed<BR>
 *<BR>
 * Option: AUTO_GETSTATE<BR>
 * Value: java.lang.Boolean<BR>
 * Result: set to true, the AUTO_RECONNECT will be set to true and the JChannel will try to get the state after a close and reconnect happens<BR>
 * <BR>
 * VIEW, SUSPECT and GET_STATE_EVENTS are deprecated (always true) and ignored.
 * Unknown options and non-Boolean values are logged as errors and ignored.
 *
 * @param option the parameter option Channel.VIEW, Channel.SUSPECT, etc
 * @param value  the value to set for this option
 */
public void setOpt(int option, Object value) {
    if(closed) {
        if(log.isWarnEnabled()) log.warn("channel is closed; option not set !");
        return;
    }
    switch(option) {
        case VIEW:
            if(log.isWarnEnabled())
                log.warn("option VIEW has been deprecated (it is always true now); this option is ignored");
            break;
        case SUSPECT:
            if(log.isWarnEnabled())
                log.warn("option SUSPECT has been deprecated (it is always true now); this option is ignored");
            break;
        case BLOCK:
            if(value instanceof Boolean)
                receive_blocks=((Boolean)value).booleanValue();
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                        " (" + value + "): value has to be Boolean");
            break;
        case GET_STATE_EVENTS:
            if(log.isWarnEnabled())
                log.warn("option GET_STATE_EVENTS has been deprecated (it is always true now); this option is ignored");
            break;
        case LOCAL:
            if(value instanceof Boolean)
                receive_local_msgs=((Boolean)value).booleanValue();
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                        " (" + value + "): value has to be Boolean");
            break;
        case AUTO_RECONNECT:
            if(value instanceof Boolean)
                auto_reconnect=((Boolean)value).booleanValue();
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                        " (" + value + "): value has to be Boolean");
            break;
        case AUTO_GETSTATE:
            if(value instanceof Boolean) {
                auto_getstate=((Boolean)value).booleanValue();
                // auto state retrieval only makes sense if we also reconnect
                if(auto_getstate)
                    auto_reconnect=true;
            }
            else
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                        " (" + value + "): value has to be Boolean");
            break;
        default:
            if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
            break;
    }
}
/**
 * Returns the value of a channel option.
 *
 * @param option the option to query (see {@link #setOpt})
 * @return the option's value (a {@code java.lang.Boolean} for all known
 *         options), or null for unknown options (which are also logged).
 * @see JChannel#setOpt
 */
public Object getOpt(int option) {
    switch(option) {
        case VIEW:
        case SUSPECT:
        case GET_STATE_EVENTS:
            // these deprecated options are hard-wired to true
            return Boolean.TRUE;
        case BLOCK:
            return Boolean.valueOf(receive_blocks);
        case AUTO_RECONNECT:
            return Boolean.valueOf(auto_reconnect);
        case AUTO_GETSTATE:
            return Boolean.valueOf(auto_getstate);
        case LOCAL:
            return Boolean.valueOf(receive_local_msgs);
        default:
            if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
            return null;
    }
}
/**
 * Called to acknowledge a block() (callback in <code>MembershipListener</code> or
 * <code>BlockEvent</code> received from call to <code>receive()</code>).
 * After sending blockOk(), no messages should be sent until a new view has been received.
 * Calling this method on a closed channel has no effect.
 */
public void blockOk() {
    // intentionally empty in this implementation
}
/**
 * Retrieves the current group state. Sends GET_STATE event down to STATE_TRANSFER layer.
 * Blocks until STATE_TRANSFER sends up a GET_STATE_OK event or until <code>timeout</code>
 * milliseconds have elapsed. The argument of GET_STATE_OK should be a single object.
 * @param target  the target member to receive the state from. if null, state is retrieved from coordinator
 * @param timeout the number of milliseconds to wait for the operation to complete successfully. 0 waits until
 *                the state has been received
 * @return true if the state was received, false if the operation timed out
 */
public boolean getState(Address target, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    // full state (no substate id), flush enabled if present
    return getState(target, null, timeout, true);
}
/**
 * Retrieves a substate (or partial state) from the target.
 * @param target   State provider. If null, coordinator is used
 * @param state_id The ID of the substate. If null, the entire state will be transferred
 * @param timeout  the number of milliseconds to wait for the operation to complete successfully. 0 waits until
 *                 the state has been received
 * @return true if the state was received, false if the operation timed out
 * @throws ChannelNotConnectedException
 * @throws ChannelClosedException
 */
public boolean getState(Address target, String state_id, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    return getState(target, state_id, timeout, true);
}
/**
 * Retrieves a substate (or partial state) from the target.
 * @param target   State provider. If null, coordinator is used
 * @param state_id The ID of the substate. If null, the entire state will be transferred
 * @param timeout  the number of milliseconds to wait for the operation to complete successfully. 0 waits until
 *                 the state has been received
 * @param useFlushIfPresent whether to run a flush around the transfer when a FLUSH protocol is in the stack
 * @return true if the state was received, false if the operation timed out
 * @throws ChannelNotConnectedException
 * @throws ChannelClosedException
 * @throws IllegalStateException if no state transfer protocol is configured
 */
public boolean getState(Address target, String state_id, long timeout,boolean useFlushIfPresent) throws ChannelNotConnectedException, ChannelClosedException {
    checkClosedOrNotConnected();
    if(!state_transfer_supported) {
        throw new IllegalStateException("fetching state will fail as state transfer is not supported. "
                + "Add one of the STATE_TRANSFER protocols to your protocol configuration");
    }
    if(target == null)
        target=determineCoordinator();
    if(target != null && local_addr != null && target.equals(local_addr)) {
        // we cannot fetch state from ourselves: probably the first member
        if(log.isTraceEnabled())
            log.trace("cannot get state from myself (" + target + "): probably the first member");
        return false;
    }
    StateTransferInfo state_info=new StateTransferInfo(target, state_id, timeout);
    boolean initiateFlush = flush_supported && useFlushIfPresent;
    if(initiateFlush)
        startFlush(false);
    // arm the promise before sending GET_STATE so the result cannot be missed
    state_promise.reset();
    down(new Event(Event.GET_STATE, state_info));
    Boolean b=state_promise.getResult(state_info.timeout);
    if(initiateFlush)
        stopFlush();
    boolean state_transfer_successfull = b != null && b.booleanValue();
    if(!state_transfer_successfull)
        down(new Event(Event.RESUME_STABLE));
    return state_transfer_successfull;
}
/**
 * Retrieves the current group state from multiple members. No longer
 * implemented: always throws UnsupportedOperationException.
 * @param targets - the target members to receive the state from ( an Address list )
 * @param timeout - the number of milliseconds to wait for the operation to complete successfully
 * @return never returns normally
 * @throws UnsupportedOperationException always
 * @deprecated Not really needed - we always want to get the state from a single member,
 *             use {@link #getState(org.jgroups.Address, long)} instead
 */
public boolean getAllStates(Vector targets, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
    throw new UnsupportedOperationException("use getState() instead");
}
/**
 * Called by the application in response to receiving a <code>getState()</code> object when
 * calling <code>receive()</code>: hands the application's serialized state back to the
 * thread waiting in the state-transfer exchange.
 *
 * @param state the state of the application as a byte buffer (to send over the network).
 */
public void returnState(byte[] state) {
    StateTransferInfo reply=new StateTransferInfo(null, null, 0L, state);
    try {
        applstate_exchanger.exchange(reply);
    }
    catch(InterruptedException interrupted) {
        // restore the interrupt flag for callers higher up
        Thread.currentThread().interrupt();
    }
}
/**
 * Hands a substate, identified by state_id, back to the thread waiting in the
 * state-transfer exchange.
 *
 * @param state    the serialized substate
 * @param state_id the ID of the substate being returned
 */
public void returnState(byte[] state, String state_id) {
    StateTransferInfo reply=new StateTransferInfo(null, state_id, 0L, state);
    try {
        applstate_exchanger.exchange(reply);
    }
    catch(InterruptedException interrupted) {
        // restore the interrupt flag for callers higher up
        Thread.currentThread().interrupt();
    }
}
/**
* Callback method <BR>
* Called by the ProtocolStack when a message is received.
* It will be added to the message queue from which subsequent
* <code>Receive</code>s will dequeue it.
* @param evt the event carrying the message from the protocol stack
*/
public Object up(Event evt) {
int type=evt.getType();
Message msg;
switch(type) {
case Event.MSG:
msg=(Message)evt.getArg();
if(stats) {
received_msgs++;
received_bytes+=msg.getLength();
}
if(!receive_local_msgs) { // discard local messages (sent by myself to me)
if(local_addr != null && msg.getSrc() != null)
if(local_addr.equals(msg.getSrc()))
return null;
}
break;
case Event.VIEW_CHANGE:
View tmp=(View)evt.getArg();
if(tmp instanceof MergeView)
my_view=new View(tmp.getVid(), tmp.getMembers());
else
my_view=tmp;
/*
* Bela&Vladimir Oct 27th,2006 (JGroups 2.4)- we need to switch to
* connected=true because client can invoke channel.getView() in
* viewAccepted() callback invoked on this thread
* (see Event.VIEW_CHANGE handling below)
*/
// not good: we are only connected when we returned from connect() - bela June 22 2007
// if(connected == false) {
// connected=true;
// }
break;
case Event.CONFIG:
Map<String,Object> config=(Map<String,Object>)evt.getArg();
if(config != null) {
if(config.containsKey("state_transfer")) {
state_transfer_supported=((Boolean)config.get("state_transfer")).booleanValue();
}
if(config.containsKey("flush_supported")) {
flush_supported=((Boolean)config.get("flush_supported")).booleanValue();
}
}
break;
case Event.INFO:
Map<String, Object> m = (Map<String, Object>) evt.getArg();
info.putAll(m);
break;
case Event.GET_STATE_OK:
StateTransferInfo state_info=(StateTransferInfo)evt.getArg();
byte[] state=state_info.state;
state_promise.setResult(state != null? Boolean.TRUE : Boolean.FALSE);
if(up_handler != null) {
return up_handler.up(evt);
}
if(state != null) {
String state_id=state_info.state_id;
if(receiver != null) {
try {
if(receiver instanceof ExtendedReceiver && state_id!=null)
((ExtendedReceiver)receiver).setState(state_id, state);
else
receiver.setState(state);
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling setState() in receiver", t);
}
}
else {
try {mq.add(new Event(Event.STATE_RECEIVED, state_info));} catch(Exception e) {}
}
}
break;
case Event.STATE_TRANSFER_INPUTSTREAM_CLOSED:
state_promise.setResult(Boolean.TRUE);
break;
case Event.STATE_TRANSFER_INPUTSTREAM:
StateTransferInfo sti=(StateTransferInfo)evt.getArg();
InputStream is=sti.inputStream;
//Oct 13,2006 moved to down() when Event.STATE_TRANSFER_INPUTSTREAM_CLOSED is received
//state_promise.setResult(is != null? Boolean.TRUE : Boolean.FALSE);
if(up_handler != null) {
return up_handler.up(evt);
}
if(is != null) {
if(receiver instanceof ExtendedReceiver) {
try {
if(sti.state_id == null)
((ExtendedReceiver)receiver).setState(is);
else
((ExtendedReceiver)receiver).setState(sti.state_id, is);
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling setState() in receiver", t);
}
}
else if(receiver instanceof Receiver){
if(log.isWarnEnabled()){
log.warn("Channel has STREAMING_STATE_TRANSFER, however," +
" application does not implement ExtendedMessageListener. State is not transfered");
Util.close(is);
}
}
else {
try {
mq.add(new Event(Event.STATE_TRANSFER_INPUTSTREAM, sti));
}
catch(Exception e) {
}
}
}
break;
case Event.SET_LOCAL_ADDRESS:
local_addr_promise.setResult((Address)evt.getArg());
break;
case Event.EXIT:
handleExit(evt);
return null; // no need to pass event up; already done in handleExit()
default:
break;
}
// If UpHandler is installed, pass all events to it and return (UpHandler is e.g. a building block)
if(up_handler != null) {
Object ret=up_handler.up(evt);
if(type == Event.UNBLOCK){
flush_unblock_promise.setResult(Boolean.TRUE);
}
return ret;
}
switch(type) {
case Event.MSG:
if(receiver != null) {
try {
receiver.receive((Message)evt.getArg());
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling receive() in receiver", t);
}
return null;
}
break;
case Event.VIEW_CHANGE:
if(receiver != null) {
try {
receiver.viewAccepted((View)evt.getArg());
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling viewAccepted() in receiver", t);
}
return null;
}
break;
case Event.SUSPECT:
if(receiver != null) {
try {
receiver.suspect((Address)evt.getArg());
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling suspect() in receiver", t);
}
return null;
}
break;
case Event.GET_APPLSTATE:
if(receiver != null) {
StateTransferInfo state_info=(StateTransferInfo)evt.getArg();
byte[] tmp_state=null;
String state_id=state_info.state_id;
try {
if(receiver instanceof ExtendedReceiver && state_id!=null) {
tmp_state=((ExtendedReceiver)receiver).getState(state_id);
}
else {
tmp_state=receiver.getState();
}
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling getState() in receiver", t);
}
return new StateTransferInfo(null, state_id, 0L, tmp_state);
}
break;
case Event.STATE_TRANSFER_OUTPUTSTREAM:
StateTransferInfo sti=(StateTransferInfo)evt.getArg();
OutputStream os=sti.outputStream;
if(receiver instanceof ExtendedReceiver) {
if(os != null) {
try {
if(sti.state_id == null)
((ExtendedReceiver)receiver).getState(os);
else
((ExtendedReceiver)receiver).getState(sti.state_id, os);
}
catch(Throwable t) {
if(log.isWarnEnabled())
log.warn("failed calling getState() in receiver", t);
}
}
}
else if(receiver instanceof Receiver){
if(log.isWarnEnabled()){
log.warn("Channel has STREAMING_STATE_TRANSFER, however," +
" application does not implement ExtendedMessageListener. State is not transfered");
Util.close(os);
}
}
break;
case Event.BLOCK:
if(!receive_blocks) { // discard if client has not set 'receiving blocks' to 'on'
return Boolean.TRUE;
}
if(receiver != null) {
try {
receiver.block();
}
catch(Throwable t) {
if(log.isErrorEnabled())
log.error("failed calling block() in receiver", t);
}
return Boolean.TRUE;
}
break;
case Event.UNBLOCK:
//invoke receiver if block receiving is on
if(receive_blocks && receiver instanceof ExtendedReceiver) {
try {
((ExtendedReceiver)receiver).unblock();
}
catch(Throwable t) {
if(log.isErrorEnabled())
log.error("failed calling unblock() in receiver", t);
}
}
//flip promise
flush_unblock_promise.setResult(Boolean.TRUE);
return null;
default:
break;
}
if(type == Event.MSG || type == Event.VIEW_CHANGE || type == Event.SUSPECT ||
type == Event.GET_APPLSTATE || type== Event.STATE_TRANSFER_OUTPUTSTREAM
|| type == Event.BLOCK || type == Event.UNBLOCK) {
try {
mq.add(evt);
}
catch(QueueClosedException queue_closed) {
; // ignore
}
catch(Exception e) {
if(log.isWarnEnabled()) log.warn("exception adding event " + evt + " to message queue", e);
}
}
if(type == Event.GET_APPLSTATE) {
try {
return applstate_exchanger.exchange(null);
}
catch(InterruptedException e) {
Thread.currentThread().interrupt();
return null;
}
}
return null;
}
/**
 * Sends a message through the protocol stack if the stack is available.
 * <p>
 * CONFIG events are intercepted before being passed down: their key/value pairs are
 * copied into {@code additional_data} (so they can be replayed down the stack after a
 * reconnect, see CloserThread), and an "additional_data" byte[] entry, if present, is
 * attached to the local address when it is an IpAddress.
 * @param evt the message to send down, encapsulated in an event; ignored if null
 */
public void down(Event evt) {
    if(evt == null) return;

    switch(evt.getType()) {
        case Event.CONFIG:
            try {
                Map<String,Object> m=(Map<String,Object>)evt.getArg();
                if(m != null) {
                    // remember the config so it can be re-sent down on reconnect
                    additional_data.putAll(m);
                    if(m.containsKey("additional_data")) {
                        byte[] tmp=(byte[])m.get("additional_data");
                        if(local_addr instanceof IpAddress)
                            ((IpAddress)local_addr).setAdditionalData(tmp);
                    }
                }
            }
            catch(Throwable t) {
                // getArg() was not a Map; log and fall through so the event still goes down
                if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t);
            }
            break;
    }

    prot_stack.down(evt);
}
/**
 * Sends an event down the protocol stack and returns the stack's result.
 * <p>
 * Like {@code down(Event)}, CONFIG events are recorded in {@code additional_data}
 * first (for replay on reconnect), and any "additional_data" byte[] is attached to
 * the local address when it is an IpAddress.
 * @param evt the event to send down; ignored if null
 * @return whatever the protocol stack returns for this event, or null if evt was null
 */
public Object downcall(Event evt) {
    if(evt == null)
        return null;

    if(evt.getType() == Event.CONFIG) {
        try {
            Map<String,Object> config=(Map<String,Object>)evt.getArg();
            if(config != null) {
                additional_data.putAll(config);
                if(config.containsKey("additional_data")) {
                    byte[] data=(byte[])config.get("additional_data");
                    if(local_addr instanceof IpAddress)
                        ((IpAddress)local_addr).setAdditionalData(data);
                }
            }
        }
        catch(Throwable t) {
            if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t);
        }
    }

    return prot_stack.down(evt);
}
/**
 * Returns a human-readable, multi-line dump of this channel's state.
 * @param details if true, configuration flags and the protocol stack properties are included
 * @return the channel description
 */
public String toString(boolean details) {
    StringBuilder buf=new StringBuilder();
    buf.append("local_addr=").append(local_addr).append('\n')
       .append("cluster_name=").append(cluster_name).append('\n')
       .append("my_view=").append(my_view).append('\n')
       .append("connected=").append(connected).append('\n')
       .append("closed=").append(closed).append('\n');
    if(mq != null)
        buf.append("incoming queue size=").append(mq.size()).append('\n');
    if(details) {
        buf.append("receive_blocks=").append(receive_blocks).append('\n')
           .append("receive_local_msgs=").append(receive_local_msgs).append('\n')
           .append("auto_reconnect=").append(auto_reconnect).append('\n')
           .append("auto_getstate=").append(auto_getstate).append('\n')
           .append("state_transfer_supported=").append(state_transfer_supported).append('\n')
           .append("props=").append(props).append('\n');
    }
    return buf.toString();
}
/* ----------------------------------- Private Methods ------------------------------------- */
/**
 * Creates and sets up the protocol stack from the given configurator.
 * Called by every constructor that builds a real (non-no-op) channel.
 * @param configurator the protocol stack configuration source
 * @throws ChannelException if the stack cannot be created or set up
 */
protected final void init(ProtocolStackConfigurator configurator) throws ChannelException {
    if(log.isInfoEnabled())
        log.info("JGroups version: " + Version.description);

    // replace vars with system props (e.g. ${jgroups.bind_addr}) before parsing
    ConfiguratorFactory.substituteVariables(configurator);
    props=configurator.getProtocolStackString();
    prot_stack=new ProtocolStack(this, props);
    try {
        prot_stack.setup(); // Setup protocol stack (creates protocols, calls init() on them)
    }
    catch(Throwable e) {
        throw new ChannelException("unable to setup the protocol stack", e);
    }
}
/**
 * Resets all connection-related state, so the channel is ready for a fresh
 * <tt>connect()</tt>. Used after <tt>close()</tt> or <tt>disconnect()</tt>.
 */
private void init() {
    // forget our previous identity, group and membership
    local_addr=null;
    cluster_name=null;
    my_view=null;
    connected=false;
}
/**
 * Starts the protocol stack for the given cluster and waits for the local address
 * to be assigned (via SET_LOCAL_ADDRESS). Also installs an initial singleton view
 * containing only this member.
 * @param cluster_name the group to start the stack for; null means a unicast channel
 * @throws ChannelClosedException if the channel has been closed
 * @throws ChannelException if the stack fails to start or no local address arrives in time
 */
private void startStack(String cluster_name) throws ChannelException {
    /*make sure the channel is not closed*/
    checkClosed();

    /*if we already are connected, then ignore this*/
    if(connected) {
        if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name);
        return;
    }

    /*make sure we have a valid channel name*/
    if(cluster_name == null) {
        if(log.isDebugEnabled()) log.debug("cluster_name is null, assuming unicast channel");
    }
    else
        this.cluster_name=cluster_name;

    try {
        prot_stack.startStack(cluster_name); // calls start() in all protocols, from top to bottom
    }
    catch(Throwable e) {
        throw new ChannelException("failed to start protocol stack", e);
    }

    // timeout is configurable via system property (new or legacy key), default 30s
    String tmp=Util.getProperty(new String[]{Global.CHANNEL_LOCAL_ADDR_TIMEOUT, "local_addr.timeout"},
                                null, null, false, "30000");
    LOCAL_ADDR_TIMEOUT=Long.parseLong(tmp);
    /* Wait LOCAL_ADDR_TIMEOUT milliseconds for local_addr to have a non-null value (set by SET_LOCAL_ADDRESS) */
    local_addr=local_addr_promise.getResult(LOCAL_ADDR_TIMEOUT);
    if(local_addr == null) {
        log.fatal("local_addr is null; cannot connect");
        throw new ChannelException("local_addr is null");
    }

    /*create a temporary view, assume this channel is the only member and is the coordinator*/
    Vector t=new Vector(1);
    t.addElement(local_addr);
    my_view=new View(local_addr, 0, t); // create a dummy view
}
/**
 * Health check: verifies the channel is still usable.
 * @throws ChannelClosedException if {@link #close()} has already been called
 */
protected void checkClosed() throws ChannelClosedException {
    if(closed)
        throw new ChannelClosedException();
}
/**
 * Health check: verifies the channel is open AND connected.
 * The closed check runs first, matching {@link #checkClosed()}.
 * @throws ChannelClosedException if the channel has been closed
 * @throws ChannelNotConnectedException if the channel is open but not connected
 */
protected void checkClosedOrNotConnected() throws ChannelNotConnectedException, ChannelClosedException {
    checkClosed();
    if(!connected)
        throw new ChannelNotConnectedException();
}
/**
 * Extracts the application-level value from an event.<BR>
 * These objects will be returned:<BR>
 * <PRE>
 * <B>Event Type                        - Return Type</B>
 * Event.MSG                            - the Message object
 * Event.VIEW_CHANGE                    - the View object
 * Event.SUSPECT                        - a new SuspectEvent
 * Event.BLOCK                          - a new BlockEvent
 * Event.UNBLOCK                        - a new UnblockEvent
 * Event.GET_APPLSTATE                  - a new GetStateEvent
 * Event.STATE_RECEIVED                 - a new SetStateEvent
 * Event.STATE_TRANSFER_OUTPUTSTREAM    - a new StreamingGetStateEvent
 * Event.STATE_TRANSFER_INPUTSTREAM     - a new StreamingSetStateEvent
 * Event.EXIT                           - a new ExitEvent
 * All other                            - the Event object itself
 * </PRE>
 * @param evt the event of which you want to extract the value
 * @return the mapped value as per the table above; null if evt is null;
 *         the event itself if the type has no mapping
 */
static Object getEvent(Event evt) {
    if(evt == null)
        return null; // correct ?

    switch(evt.getType()) {
        case Event.MSG:
            return evt.getArg();
        case Event.VIEW_CHANGE:
            return evt.getArg();
        case Event.SUSPECT:
            return new SuspectEvent(evt.getArg());
        case Event.BLOCK:
            return new BlockEvent();
        case Event.UNBLOCK:
            return new UnblockEvent();
        case Event.GET_APPLSTATE:
            StateTransferInfo info=(StateTransferInfo)evt.getArg();
            return new GetStateEvent(info.target, info.state_id);
        case Event.STATE_RECEIVED:
            info=(StateTransferInfo)evt.getArg();
            return new SetStateEvent(info.state, info.state_id);
        case Event.STATE_TRANSFER_OUTPUTSTREAM:
            info = (StateTransferInfo)evt.getArg();
            return new StreamingGetStateEvent(info.outputStream,info.state_id);
        case Event.STATE_TRANSFER_INPUTSTREAM:
            info = (StateTransferInfo)evt.getArg();
            return new StreamingSetStateEvent(info.inputStream,info.state_id);
        case Event.EXIT:
            return new ExitEvent();
        default:
            return evt;
    }
}
/**
 * Disconnects and closes the channel.
 * This method does the following things, in order:
 * <ol>
 * <li>Calls <code>this.disconnect</code> if the disconnect parameter is true
 * <li>Calls <code>Queue.close</code> on mq if the close_mq parameter is true
 * <li>Calls <code>ProtocolStack.stop</code> on the protocol stack
 * <li>Calls <code>ProtocolStack.destroy</code> on the protocol stack
 * <li>Sets the channel closed and channel connected flags to true and false
 * <li>Notifies any channel listener of the channel close operation
 * </ol>
 * Idempotent: returns immediately if the channel is already closed.
 * @param disconnect whether to leave the group first
 * @param close_mq   whether to close the incoming message queue
 */
protected void _close(boolean disconnect, boolean close_mq) {
    if(closed)
        return;

    if(disconnect)
        disconnect();                     // leave group if connected

    if(close_mq)
        closeMessageQueue(false);

    stopStack(true, true);
    closed=true;
    connected=false;
    notifyChannelClosed(this);
    init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining
}
/**
 * Stops and/or destroys the protocol stack, swallowing (but logging) any failure.
 * @param disconnect if true, stop() is called on every protocol (leaves the cluster)
 * @param destroy    if true, destroy() is called on every protocol afterwards
 */
protected void stopStack(boolean disconnect, boolean destroy) {
    if(prot_stack == null)
        return;
    try {
        if(disconnect)
            prot_stack.stopStack(cluster_name);
        if(destroy)
            prot_stack.destroy();
    }
    catch(Exception e) {
        if(log.isErrorEnabled())
            log.error("failed destroying the protocol stack", e);
    }
}
/**
 * Closes the incoming message queue, if one exists.
 * @param flush_entries if true, queued events are delivered before the queue closes
 */
public final void closeMessageQueue(boolean flush_entries) {
    if(mq == null)
        return;
    mq.close(flush_entries);
}
/**
 * Creates a separate thread to close the protocol stack.
 * This is needed because the thread that called JChannel.up() with the EXIT event would
 * hang waiting for up() to return, while up() actually tries to kill that very thread.
 * This way, we return immediately and allow the thread to terminate.
 * Only one CloserThread runs at a time; a finished (dead) one is discarded first.
 * @param evt the EXIT event, handed to the closer so it can be delivered to the application
 */
private void handleExit(Event evt) {
    notifyChannelShunned();
    // discard a closer that has already finished, so a new one can be started
    if(closer != null && !closer.isAlive())
        closer=null;
    if(closer == null) {
        if(log.isDebugEnabled())
            log.debug("received an EXIT event, will leave the channel");
        closer=new CloserThread(evt);
        closer.start();
    }
}
/**
 * Returns true if a FLUSH protocol is present in this channel's stack.
 * Set from a CONFIG event ("flush_supported") sent up by the FLUSH protocol.
 */
public boolean flushSupported() {
    return flush_supported;
}
/**
 * Will perform a flush of the system, ie. all pending messages are flushed out of the
 * system and all members ack their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * In case of flush collisions, a random sleep time backoff algorithm is employed and
 * the flush is reattempted for numberOfAttempts; this method is therefore guaranteed
 * to return after timeout x numberOfAttempts milliseconds.
 *
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 * @throws IllegalStateException if no FLUSH protocol is present in the stack
 */
public boolean startFlush(boolean automatic_resume) {
    if(!flush_supported)
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");

    boolean rc=(Boolean)downcall(new Event(Event.SUSPEND));
    if(automatic_resume)
        stopFlush();
    return rc;
}
/**
 * Performs a partial flush in a cluster for flush participants.
 * <p>
 * All pending messages are flushed out only for flush participants; the remaining
 * members of the cluster are not included. The flush participants must be a subset
 * of the current view.
 * <p>
 * In case of flush collisions, a random sleep time backoff algorithm is employed and
 * the flush is reattempted for numberOfAttempts; this method is therefore guaranteed
 * to return after timeout x numberOfAttempts milliseconds.
 *
 * @param flushParticipants the members to include in the flush; must all be in the current view
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 * @throws IllegalStateException if no FLUSH protocol is present in the stack
 * @throws IllegalArgumentException if the current view does not contain all participants
 */
public boolean startFlush(List<Address> flushParticipants,boolean automatic_resume) {
    if(!flush_supported)
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");

    View v = getView();
    boolean allInView=v != null && v.getMembers().containsAll(flushParticipants);
    if(!allInView)
        throw new IllegalArgumentException("Current view " + v
                                           + " does not contain all flush participants "
                                           + flushParticipants);

    boolean rc=(Boolean)downcall(new Event(Event.SUSPEND, flushParticipants));
    if(automatic_resume)
        stopFlush(flushParticipants);
    return rc;
}
/**
 * Will perform a flush of the system, ie. all pending messages are flushed out of the
 * system and all members ack their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * In case of flush collisions, a random sleep time backoff algorithm is employed and
 * the flush is reattempted for numberOfAttempts; this method is therefore guaranteed
 * to return after timeout x numberOfAttempts milliseconds.
 * @param timeout currently IGNORED — this overload simply delegates to
 *                {@link #startFlush(boolean)}. NOTE(review): either honor the
 *                timeout or deprecate this overload; confirm intended behavior.
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 */
public boolean startFlush(long timeout, boolean automatic_resume) {
    return startFlush(automatic_resume);
}
/**
 * Resumes normal message sending after a {@link #startFlush(boolean)}.
 * Sends a RESUME event down the stack and blocks until the corresponding
 * UNBLOCK event arrives (or FLUSH_UNBLOCK_TIMEOUT elapses, which is only logged).
 * @throws IllegalStateException if no FLUSH protocol is present in the stack
 */
public void stopFlush() {
    if(!flush_supported)
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");

    flush_unblock_promise.reset();
    down(new Event(Event.RESUME));

    //do not return until UNBLOCK event is received
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException timeout) {
        log.warn("Timeout waiting for UNBLOCK event at " + getLocalAddress());
    }
}
/**
 * Resumes normal message sending for the given flush participants after a
 * partial flush. Sends a RESUME event carrying the participant list and blocks
 * until the corresponding UNBLOCK event arrives (or FLUSH_UNBLOCK_TIMEOUT
 * elapses, which is only logged).
 * @param flushParticipants the members for which the flush is being lifted
 * @throws IllegalStateException if no FLUSH protocol is present in the stack
 */
public void stopFlush(List<Address> flushParticipants) {
    if(!flush_supported)
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");

    flush_unblock_promise.reset();
    down(new Event(Event.RESUME, flushParticipants));

    // do not return until UNBLOCK event is received
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException timeout) {
        log.warn("Timeout waiting for UNBLOCK event at " + getLocalAddress());
    }
}
/**
 * Returns a snapshot copy of the channel's info map (populated by INFO events
 * from the stack and by {@link #setInfo(String, Object)}). The returned map is
 * a mutable copy; changes to it do not affect the channel.
 */
@Override
public Map<String, Object> getInfo(){
    return new HashMap<String, Object>(info);
}
/**
 * Stores a key/value pair in the channel's info map. A null key is silently ignored.
 * @param key   the entry's key; ignored if null
 * @param value the value to associate with the key
 */
public void setInfo(String key, Object value) {
    if(key == null)
        return;
    info.put(key, value);
}
/**
 * Determines the coordinator of the current view: by convention the first
 * member of the membership list.
 * @return the coordinator's address, or null if there is no view or no members
 */
Address determineCoordinator() {
    View v=my_view;
    if(v == null)
        return null;
    Vector members=v.getMembers();
    if(members == null || members.isEmpty())
        return null;
    return (Address)members.firstElement();
}
/* ------------------------------- End of Private Methods ---------------------------------- */
/**
 * Daemon thread that closes the channel after an EXIT event (shun) and, depending
 * on the auto_reconnect / auto_getstate flags, reopens the stack, rejoins the old
 * cluster and re-fetches the state. Runs detached so the stack thread that
 * delivered EXIT is not blocked while being torn down (see handleExit()).
 */
class CloserThread extends Thread {
    // the EXIT event, delivered to the up_handler or the message queue after closing
    final Event evt;
    // NOTE(review): never assigned a non-null value and never read — apparently dead; candidate for removal
    final Thread t=null;


    CloserThread(Event evt) {
        super(Util.getGlobalThreadGroup(), "CloserThread");
        this.evt=evt;
        setDaemon(true);
    }


    public void run() {
        try {
            String old_cluster_name=cluster_name; // remember because close() will null it
            if(log.isDebugEnabled())
                log.debug("closing the channel");
            _close(false, false); // do not disconnect before closing channel, do not close mq (yet !)

            // hand the EXIT event to the application: either via the up_handler
            // or by queueing it for the receive() loop (only if no Receiver is set)
            if(up_handler != null)
                up_handler.up(this.evt);
            else {
                try {
                    if(receiver == null)
                        mq.add(this.evt);
                }
                catch(Exception ex) {
                    if(log.isErrorEnabled()) log.error("exception: " + ex);
                }
            }

            if(mq != null) {
                Util.sleep(500); // give the mq thread a bit of time to deliver EXIT to the application
                try {
                    mq.close(false);
                }
                catch(Exception ex) {
                }
            }

            if(auto_reconnect) {
                try {
                    if(log.isDebugEnabled()) log.debug("reconnecting to group " + old_cluster_name);
                    open();
                    if(additional_data != null) {
                        // send previously set additional_data down the stack - other protocols (e.g. TP) use it
                        Map<String,Object> m=new HashMap<String,Object>(additional_data);
                        down(new Event(Event.CONFIG, m));
                    }
                }
                catch(Exception ex) {
                    if(log.isErrorEnabled()) log.error("failure reopening channel: " + ex);
                    return;
                }
                // retry the join forever, 1s apart, until connected
                while(!connected) {
                    try {
                        connect(old_cluster_name);
                        notifyChannelReconnected(local_addr);
                    }
                    catch(Exception ex) {
                        if(log.isErrorEnabled()) log.error("failure reconnecting to channel, retrying", ex);
                        Util.sleep(1000); // sleep 1 sec between reconnect attempts
                    }
                }
            }

            if(auto_getstate) {
                if(log.isDebugEnabled())
                    log.debug("fetching the state (auto_getstate=true)");
                boolean rc=JChannel.this.getState(null, GET_STATE_DEFAULT_TIMEOUT);
                if(log.isDebugEnabled()) {
                    if(rc)
                        log.debug("state was retrieved successfully");
                    else
                        log.debug("state transfer failed");
                }
            }

        }
        catch(Exception ex) {
            if(log.isErrorEnabled()) log.error("exception: " + ex);
        }
        finally {
            closer=null; // allow a future EXIT to start a new closer
        }
    }
}
}
|
src/org/jgroups/JChannel.java
|
package org.jgroups;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.jgroups.conf.ConfiguratorFactory;
import org.jgroups.conf.ProtocolStackConfigurator;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.ProtocolStack;
import org.jgroups.stack.StateTransferInfo;
import org.jgroups.util.Promise;
import org.jgroups.util.Queue;
import org.jgroups.util.QueueClosedException;
import org.jgroups.util.Util;
import org.w3c.dom.Element;
import java.io.File;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Exchanger;
/**
* JChannel is a pure Java implementation of Channel.
* When a JChannel object is instantiated it automatically sets up the
* protocol stack.
* <p>
* <B>Properties</B>
* <P>
* Properties are used to configure a channel, and are accepted in
* several forms; the String form is described here.
* A property string consists of a number of properties separated by
* colons. For example:
* <p>
* <pre>"<prop1>(arg1=val1):<prop2>(arg1=val1;arg2=val2):<prop3>:<propn>"</pre>
* <p>
* Each property relates directly to a protocol layer, which is
* implemented as a Java class. When a protocol stack is to be created
* based on the above property string, the first property becomes the
* bottom-most layer, the second one will be placed on the first, etc.:
* the stack is created from the bottom to the top, as the string is
* parsed from left to right. Each property has to be the name of a
* Java class that resides in the
* {@link org.jgroups.protocols} package.
* <p>
* Note that only the base name has to be given, not the fully specified
* class name (e.g., UDP instead of org.jgroups.protocols.UDP).
* <p>
* Each layer may have 0 or more arguments, which are specified as a
* list of name/value pairs in parentheses directly after the property.
* In the example above, the first protocol layer has 1 argument,
* the second 2, the third none. When a layer is created, these
* properties (if there are any) will be set in a layer by invoking
* the layer's setProperties() method
* <p>
* As an example the property string below instructs JGroups to create
* a JChannel with protocols UDP, PING, FD and GMS:<p>
* <pre>"UDP(mcast_addr=228.10.9.8;mcast_port=5678):PING:FD:GMS"</pre>
* <p>
* The UDP protocol layer is at the bottom of the stack, and it
* should use mcast address 228.10.9.8. and port 5678 rather than
* the default IP multicast address and port. The only other argument
* instructs FD to output debug information while executing.
* Property UDP refers to a class {@link org.jgroups.protocols.UDP},
* which is subsequently loaded and an instance of which is created as protocol layer.
* If any of these classes are not found, an exception will be thrown and
* the construction of the stack will be aborted.
*
* @author Bela Ban
* @version $Id: JChannel.java,v 1.168 2008/01/30 05:18:38 vlada Exp $
*/
public class JChannel extends Channel {
/**
* The default protocol stack used by the default constructor.
*/
public static final String DEFAULT_PROTOCOL_STACK="udp.xml";
static final String FORCE_PROPS="force.properties";
/* the protocol stack configuration string */
private String props=null;
/*the address of this JChannel instance*/
private Address local_addr=null;
/*the channel (also known as group) name*/
private String cluster_name=null; // group name
/*the latest view of the group membership*/
private View my_view=null;
/*the queue that is used to receive messages (events) from the protocol stack*/
private final Queue mq=new Queue();
/*the protocol stack, used to send and receive messages from the protocol stack*/
private ProtocolStack prot_stack=null;
/** Thread responsible for closing a channel and potentially reconnecting to it (e.g., when shunned). */
protected CloserThread closer=null;
/** To wait until a local address has been assigned */
private final Promise<Address> local_addr_promise=new Promise<Address>();
private final Promise<Boolean> state_promise=new Promise<Boolean>();
private final Exchanger<StateTransferInfo> applstate_exchanger=new Exchanger<StateTransferInfo>();
private final Promise<Boolean> flush_unblock_promise=new Promise<Boolean>();
/** wait until we have a non-null local_addr */
private long LOCAL_ADDR_TIMEOUT=30000; //=Long.parseLong(System.getProperty("local_addr.timeout", "30000"));
/*if the states is fetched automatically, this is the default timeout, 5 secs*/
private static final long GET_STATE_DEFAULT_TIMEOUT=5000;
/*if FLUSH is used channel waits for UNBLOCK event, this is the default timeout, 5 secs*/
private static final long FLUSH_UNBLOCK_TIMEOUT=5000;
/*flag to indicate whether to receive blocks, if this is set to true, receive_views is set to true*/
private boolean receive_blocks=false;
/*flag to indicate whether to receive local messages
*if this is set to false, the JChannel will not receive messages sent by itself*/
private boolean receive_local_msgs=true;
/*flag to indicate whether the channel will reconnect (reopen) when the exit message is received*/
private boolean auto_reconnect=true;
/*flag to indicate whether the state is supposed to be retrieved after the channel is reconnected
*setting this to true automatically forces auto_reconnect to true*/
private boolean auto_getstate=true;
/*channel connected flag*/
protected volatile boolean connected=false;
/*channel closed flag*/
protected volatile boolean closed=false; // close() has been called, channel is unusable
/** True if a state transfer protocol is available, false otherwise */
private boolean state_transfer_supported=false; // set by CONFIG event from STATE_TRANSFER protocol
/** True if a flush protocol is available, false otherwise */
private volatile boolean flush_supported=false; // set by CONFIG event from FLUSH protocol
/** Provides storage for arbitrary objects. Protocols can send up CONFIG events, and all key-value pairs of
* a CONFIG event will be added to additional_data. On reconnect, a CONFIG event will be sent down by the channel,
* containing all key-value pairs of additional_data
*/
protected final Map<String,Object> additional_data=new HashMap<String,Object>();
protected final ConcurrentMap<String,Object> info=new ConcurrentHashMap<String,Object>();
protected final Log log=LogFactory.getLog(getClass());
/** Collect statistics */
protected boolean stats=true;
protected long sent_msgs=0, received_msgs=0, sent_bytes=0, received_bytes=0;
/** Used by subclasses to create a JChannel without a protocol stack; do not use as an application programmer */
protected JChannel(boolean no_op) {
    ;
}

/**
 * Constructs a <code>JChannel</code> instance with the protocol stack
 * specified by the <code>DEFAULT_PROTOCOL_STACK</code> member (udp.xml).
 *
 * @throws ChannelException if problems occur during the initialization of
 *                          the protocol stack.
 */
public JChannel() throws ChannelException {
    this(DEFAULT_PROTOCOL_STACK);
}
/**
* Constructs a <code>JChannel</code> instance with the protocol stack
* configuration contained by the specified file.
*
* @param properties a file containing a JGroups XML protocol stack
* configuration.
*
* @throws ChannelException if problems occur during the configuration or
* initialization of the protocol stack.
*/
public JChannel(File properties) throws ChannelException {
this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
* Constructs a <code>JChannel</code> instance with the protocol stack
* configuration contained by the specified XML element.
*
* @param properties a XML element containing a JGroups XML protocol stack
* configuration.
*
* @throws ChannelException if problems occur during the configuration or
* initialization of the protocol stack.
*/
public JChannel(Element properties) throws ChannelException {
this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
* Constructs a <code>JChannel</code> instance with the protocol stack
* configuration indicated by the specified URL.
*
* @param properties a URL pointing to a JGroups XML protocol stack
* configuration.
*
* @throws ChannelException if problems occur during the configuration or
* initialization of the protocol stack.
*/
public JChannel(URL properties) throws ChannelException {
this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
* Constructs a <code>JChannel</code> instance with the protocol stack
* configuration based upon the specified properties parameter.
*
* @param properties an old style property string, a string representing a
* system resource containing a JGroups XML configuration,
* a string representing a URL pointing to a JGroups XML
* XML configuration, or a string representing a file name
* that contains a JGroups XML configuration.
*
* @throws ChannelException if problems occur during the configuration and
* initialization of the protocol stack.
*/
public JChannel(String properties) throws ChannelException {
this(ConfiguratorFactory.getStackConfigurator(properties));
}
/**
* Constructs a <code>JChannel</code> instance with the protocol stack
* configuration contained by the protocol stack configurator parameter.
* <p>
* All of the public constructors of this class eventually delegate to this
* method.
*
* @param configurator a protocol stack configurator containing a JGroups
* protocol stack configuration.
*
* @throws ChannelException if problems occur during the initialization of
* the protocol stack.
*/
public JChannel(ProtocolStackConfigurator configurator) throws ChannelException {
init(configurator);
}
/**
 * Creates a new JChannel with the protocol stack as defined in the properties
 * parameter, e.g.<BR>
 * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"<BR>
 * Other examples can be found in the ./conf directory.
 * @param properties the protocol stack setup: an old-style property string, a
 *                   java.net.URL, a file name, or an org.w3c.dom.Element; if null,
 *                   the default protocol stack will be used.
 * @throws ChannelException if the configuration cannot be loaded or the stack
 *                          cannot be initialized
 * @deprecated Use the constructors with specific parameter types instead.
 */
public JChannel(Object properties) throws ChannelException {
    Object config=properties != null? properties : DEFAULT_PROTOCOL_STACK;

    ProtocolStackConfigurator configurator;
    try {
        configurator=ConfiguratorFactory.getStackConfigurator(config);
    }
    catch(Exception x) {
        throw new ChannelException("unable to load protocol stack", x);
    }
    init(configurator);
}
/**
 * Returns the protocol stack.
 * Currently used by Debugger.
 * Specific to JChannel, therefore not visible in Channel.
 */
public ProtocolStack getProtocolStack() {
    return prot_stack;
}

/** Returns this channel's logger; for use by subclasses. */
protected Log getLog() {
    return log;
}

/**
 * Returns the protocol stack configuration in string format, e.g.<BR>
 * "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:FLUSH:GMS:VIEW_ENFORCER:STATE_TRANSFER:QUEUE"
 */
public String getProperties() {
    return props;
}
/** Returns whether message/byte statistics are being collected. */
public boolean statsEnabled() {
    return stats;
}

/** Enables or disables collection of message/byte statistics. */
public void enableStats(boolean stats) {
    this.stats=stats;
}

/** Resets all message and byte counters to zero. */
public void resetStats() {
    sent_msgs=received_msgs=sent_bytes=received_bytes=0;
}

/** Number of messages sent since creation or the last {@link #resetStats()}. */
public long getSentMessages() {return sent_msgs;}
/** Number of bytes sent since creation or the last {@link #resetStats()}. */
public long getSentBytes() {return sent_bytes;}
/** Number of messages received since creation or the last {@link #resetStats()}. */
public long getReceivedMessages() {return received_msgs;}
/** Number of bytes received since creation or the last {@link #resetStats()}. */
public long getReceivedBytes() {return received_bytes;}
/** Returns the number of tasks currently queued in the stack's timer, or -1 if there is no stack. */
public int getNumberOfTasksInTimer() {
    if(prot_stack == null)
        return -1;
    return prot_stack.timer.size();
}

/** Returns the number of threads used by the stack's timer, or -1 if there is no stack. */
public int getTimerThreads() {
    if(prot_stack == null)
        return -1;
    return prot_stack.getTimerThreads();
}
/**
 * Dumps the contents of the stack's timer queue, mainly for diagnostics.
 * @return the timer queue dump, or the placeholder "&lt;n/a&gt;" if the
 *         protocol stack has not been created yet
 */
public String dumpTimerQueue() {
    // fixed: placeholder was the unbalanced "<n/a" (missing closing '>')
    return prot_stack != null? prot_stack.dumpTimerQueue() : "<n/a>";
}
/**
 * Returns a pretty-printed form of all the protocols in the stack.
 * @param include_properties if true, the properties for each protocol are printed as well
 * @return the printed stack, or null if the protocol stack has not been created yet
 */
public String printProtocolSpec(boolean include_properties) {
    if(prot_stack == null)
        return null;
    return prot_stack.printProtocolSpec(include_properties);
}
/**
 * Connects the channel to a group.
 * If the channel is already connected, an error message will be printed to the error log.
 * If the channel is closed a ChannelClosed exception will be thrown.
 * This method starts the protocol stack by calling ProtocolStack.start,
 * then it sends an Event.CONNECT event down the stack and waits for the return value.
 * Once the call returns, the channel listeners are notified and the channel is considered connected.
 * When FLUSH is present in the stack, this method additionally blocks until the
 * post-join UNBLOCK event arrives (bounded by FLUSH_UNBLOCK_TIMEOUT).
 *
 * @param cluster_name A <code>String</code> denoting the group name; null means a unicast channel
 *                     (the stack is started but no CONNECT is sent).
 * @exception ChannelException The protocol stack cannot be started, or the JOIN was rejected
 * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
 *                                   A new channel has to be created first.
 */
public synchronized void connect(String cluster_name) throws ChannelException {
    startStack(cluster_name);

    // only connect if we are not a unicast channel
    if(cluster_name != null) {
        // reset the UNBLOCK promise BEFORE joining, so the post-join UNBLOCK is not missed
        if(flush_supported)
            flush_unblock_promise.reset();

        Event connect_event=new Event(Event.CONNECT, cluster_name);
        Object res=downcall(connect_event); // waits forever until connected (or channel is closed)
        if(res != null && res instanceof Exception) { // the JOIN was rejected by the coordinator
            // roll back: stop the stack (but keep it intact) and clear connection state
            stopStack(true, false);
            init();
            throw new ChannelException("connect() failed", (Throwable)res);
        }

        //if FLUSH is used do not return from connect() until UNBLOCK event is received
        if(flush_supported) {
            try {
                flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
            }
            catch (TimeoutException timeout) {
                if(log.isWarnEnabled())
                    log.warn(local_addr + " waiting on UNBLOCK after connect() timed out");
            }
        }
    }
    connected=true;
    notifyChannelConnected(this);
}
    /**
     * Connects this channel to a group and gets a state from a specified state
     * provider.
     * <p>
     *
     * This method essentially invokes
     * <code>connect<code> and <code>getState<code> methods successively.
     * If FLUSH protocol is in channel's stack definition only one flush is executed for both connecting and
     * fetching state rather than two flushes if we invoke <code>connect<code> and <code>getState<code> in succesion.
     *
     * If the channel is already connected, an error message will be printed to the error log.
     * If the channel is closed a ChannelClosed exception will be thrown.
     *
     *
     * @param cluster_name  the cluster name to connect to. Cannot be null.
     * @param target the state provider. If null state will be fetched from coordinator, unless this channel is coordinator.
     * @param state_id the substate id for partial state transfer. If null entire state will be transferred.
     * @param timeout the timeout for state transfer.
     *
     * @exception ChannelException The protocol stack cannot be started
     * @exception ChannelException Connecting to cluster was not successful
     * @exception ChannelClosedException The channel is closed and therefore cannot be used any longer.
     *                                   A new channel has to be created first.
     * @exception StateTransferException State transfer was not successful
     *
     */
    public synchronized void connect(String cluster_name,
                                     Address target,
                                     String state_id,
                                     long timeout) throws ChannelException {
        startStack(cluster_name);
        boolean stateTransferOk=false;
        boolean joinSuccessful=false;
        boolean canFetchState=false;
        // only connect if we are not a unicast channel
        if(cluster_name != null) {
            try {
                // CONNECT_WITH_STATE_TRANSFER lets FLUSH cover both join and state fetch in one flush phase
                Event connect_event=new Event(Event.CONNECT_WITH_STATE_TRANSFER, cluster_name);
                Object res=downcall(connect_event); // waits forever until
                // connected (or channel is
                // closed)
                joinSuccessful=!(res != null && res instanceof Exception);
                if(!joinSuccessful) {
                    // roll back stack and channel state before reporting the failed join
                    stopStack(true, false);
                    init();
                    throw new ChannelException("connect() failed", (Throwable)res);
                }
                connected=true;
                notifyChannelConnected(this);
                canFetchState=getView() != null && getView().size() > 1;
                // if I am not the only member in cluster then
                if(canFetchState) {
                    try {
                        // fetch state from target; false = reuse the flush already started by the join
                        stateTransferOk=getState(target, state_id, timeout, false);
                        if(!stateTransferOk) {
                            throw new StateTransferException(getLocalAddress() + " could not fetch state "
                                                             + state_id
                                                             + " from "
                                                             + target);
                        }
                    }
                    catch(Exception e) {
                        throw new StateTransferException(getLocalAddress() + " could not fetch state "
                                                         + state_id
                                                         + " from "
                                                         + target, e);
                    }
                }
            }
            finally {
                // the flush started for the combined join+fetch must always be stopped
                if(flush_supported && canFetchState)
                    stopFlush();
            }
        }
    }
    /**
     * Disconnects the channel if it is connected. If the channel is closed,
     * this operation is ignored<BR>
     * Otherwise the following actions happen in the listed order<BR>
     * <ol>
     * <li> The JChannel sends a DISCONNECT event down the protocol stack<BR>
     * <li> Blocks until the event has returned<BR>
     * <li> Sends a STOP_QUEING event down the stack<BR>
     * <li> Stops the protocol stack by calling ProtocolStack.stop()<BR>
     * <li> Notifies the listener, if the listener is available<BR>
     * </ol>
     */
    public synchronized void disconnect() {
        if(closed) return;
        if(connected) {
            if(cluster_name != null) {
                // Send down a DISCONNECT event, which travels down to the GMS, where a response is returned
                Event disconnect_event=new Event(Event.DISCONNECT, local_addr);
                down(disconnect_event);   // DISCONNECT is handled by each layer
            }
            connected=false;
            stopStack(true, false);
            notifyChannelDisconnected(this);
            init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining
        }
    }
    /**
     * Destroys the channel.
     * After this method has been called, the channel us unusable.<BR>
     * This operation will disconnect the channel and close the channel receive queue immediately<BR>
     */
    public synchronized void close() {
        _close(true, true); // by default disconnect before closing channel and close mq
    }
    /** Shuts down the channel without disconnecting: a SHUTDOWN event is pushed down
     *  the stack first, then the channel is closed without leaving the group. */
    public synchronized void shutdown() {
        down(new Event(Event.SHUTDOWN));
        _close(false, true); // by default disconnect before closing channel and close mq
    }
    /**
     * Opens the channel. Note that the channel is only open, but <em>not connected</em>.
     * This does the following actions:
     * <ol>
     * <li> Resets the receiver queue by calling Queue.reset
     * <li> Sets up the protocol stack by calling ProtocolStack.setup
     * <li> Sets the closed flag to false
     * </ol>
     * @throws ChannelException if the channel is already open, or the stack cannot be created/set up
     */
    public synchronized void open() throws ChannelException {
        if(!closed)
            throw new ChannelException("channel is already open");
        try {
            mq.reset();
            // new stack is created on open() - bela June 12 2003
            prot_stack=new ProtocolStack(this, props);
            prot_stack.setup();
            closed=false;
        }
        catch(Exception e) {
            throw new ChannelException("failed to open channel" , e);
        }
    }
    /**
     * Returns true if the channel is open (i.e. not closed); open does not imply connected.
     */
    public boolean isOpen() {
        return !closed;
    }
    /**
     * Returns true if connect() has completed successfully and the channel has not
     * since been disconnected or closed.
     */
    public boolean isConnected() {
        return connected;
    }
public int getNumMessages() {
return mq != null? mq.size() : -1;
}
    /** Returns a printable dump of the receive queue's contents (for diagnostics). */
    public String dumpQueue() {
        return Util.dumpQueue(mq);
    }
    /**
     * Returns a map of statistics of the various protocols and of the channel itself.
     * @return Map<String,Map>. A map where the keys are the protocols ("channel" pseudo key is
     * used for the channel itself") and the values are property maps.
     */
    public Map dumpStats() {
        Map retval=prot_stack.dumpStats();
        if(retval != null) {
            // merge the channel's own counters in under the "channel" pseudo key
            Map tmp=dumpChannelStats();
            if(tmp != null)
                retval.put("channel", tmp);
        }
        return retval;
    }
private Map dumpChannelStats() {
Map retval=new HashMap();
retval.put("sent_msgs", new Long(sent_msgs));
retval.put("sent_bytes", new Long(sent_bytes));
retval.put("received_msgs", new Long(received_msgs));
retval.put("received_bytes", new Long(received_bytes));
return retval;
}
    /**
     * Sends a message through the protocol stack.
     * Implements the  Transport interface.
     *
     * @param msg the message to be sent through the protocol stack,
     *        the destination of the message is specified inside the message itself
     * @exception ChannelNotConnectedException
     * @exception ChannelClosedException
     * @throws NullPointerException if msg is null
     */
    public void send(Message msg) throws ChannelNotConnectedException, ChannelClosedException {
        checkClosedOrNotConnected();
        if(msg == null)
            throw new NullPointerException("msg is null");
        if(stats) {
            sent_msgs++;
            sent_bytes+=msg.getLength();
        }
        down(new Event(Event.MSG, msg));
    }
    /**
     * Creates a new message with the destination address, and the source address
     * and the object as the message value, then sends it via {@link #send(Message)}.
     * @param dst - the destination address of the message, null for all members
     * @param src - the source address of the message
     * @param obj - the value of the message
     * @exception ChannelNotConnectedException
     * @exception ChannelClosedException
     * @see JChannel#send
     */
    public void send(Address dst, Address src, Serializable obj) throws ChannelNotConnectedException, ChannelClosedException {
        send(new Message(dst, src, obj));
    }
    /**
     * Blocking receive method.
     * This method returns the object that was first received by this JChannel and that has not been
     * received before. After the object is received, it is removed from the receive queue.<BR>
     * If you only want to inspect the object received without removing it from the queue call
     * JChannel.peek<BR>
     * If no messages are in the receive queue, this method blocks until a message is added or the operation times out<BR>
     * By specifying a timeout of 0, the operation blocks forever, or until a message has been received.
     * @param timeout the number of milliseconds to wait if the receive queue is empty. 0 means wait forever
     * @exception TimeoutException if a timeout occured prior to a new message was received
     * @exception ChannelNotConnectedException
     * @exception ChannelClosedException
     * @see JChannel#peek
     * @deprecated Use a {@link Receiver} instead
     */
    public Object receive(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
        checkClosedOrNotConnected();
        try {
            // timeout <= 0 blocks forever; otherwise waits at most 'timeout' ms
            Event evt=(timeout <= 0)? (Event)mq.remove() : (Event)mq.remove(timeout);
            Object retval=getEvent(evt);
            evt=null;
            return retval;
        }
        catch(QueueClosedException queue_closed) {
            // the queue is closed when the channel is closed concurrently
            throw new ChannelClosedException();
        }
        catch(TimeoutException t) {
            throw t;
        }
        catch(Exception e) {
            // NOTE(review): stack trace is dropped here; only e.toString() is logged
            if(log.isErrorEnabled()) log.error("exception: " + e);
            return null;
        }
    }
    /**
     * Just peeks at the next message, view or block. Does <em>not</em> install
     * new view if view is received<BR>
     * Does the same thing as JChannel.receive but doesn't remove the object from the
     * receiver queue.
     * @param timeout the number of milliseconds to wait if the queue is empty; 0 blocks forever
     * @return the next pending object, or null on timeout/closed queue (unlike receive(), no exception)
     */
    public Object peek(long timeout) throws ChannelNotConnectedException, ChannelClosedException, TimeoutException {
        checkClosedOrNotConnected();
        try {
            Event evt=(timeout <= 0)? (Event)mq.peek() : (Event)mq.peek(timeout);
            Object retval=getEvent(evt);
            evt=null;
            return retval;
        }
        catch(QueueClosedException queue_closed) {
            if(log.isErrorEnabled()) log.error("exception: " + queue_closed);
            return null;
        }
        catch(TimeoutException t) {
            // unlike receive(), a timeout is swallowed and null is returned
            return null;
        }
        catch(Exception e) {
            if(log.isErrorEnabled()) log.error("exception: " + e);
            return null;
        }
    }
/**
* Returns the current view.
* <BR>
* If the channel is not connected or if it is closed it will return null.
* <BR>
* @return returns the current group view, or null if the channel is closed or disconnected
*/
public View getView() {
return closed || !connected ? null : my_view;
}
/**
* returns the local address of the channel
* returns null if the channel is closed
*/
public Address getLocalAddress() {
return closed ? null : local_addr;
}
/**
* returns the name of the channel
* if the channel is not connected or if it is closed it will return null
* @deprecated Use {@link #getClusterName()} instead
*/
public String getChannelName() {
return closed ? null : !connected ? null : cluster_name;
}
public String getClusterName() {
return closed ? null : !connected ? null : cluster_name;
}
    /**
     * Sets a channel option. The options can be one of the following:
     * <UL>
     * <LI> Channel.BLOCK
     * <LI> Channel.LOCAL
     * <LI> Channel.AUTO_RECONNECT
     * <LI> Channel.AUTO_GETSTATE
     * </UL>
     * <P>
     * There are certain dependencies between the options that you can set,
     * I will try to describe them here.
     * <P>
     * Option: Channel.BLOCK<BR>
     * Value:  java.lang.Boolean<BR>
     * Result: set to true will set setOpt(VIEW, true) and the JChannel will receive BLOCKS and VIEW events<BR>
     *<BR>
     * Option: LOCAL<BR>
     * Value:  java.lang.Boolean<BR>
     * Result: set to true the JChannel will receive messages that it self sent out.<BR>
     *<BR>
     * Option: AUTO_RECONNECT<BR>
     * Value:  java.lang.Boolean<BR>
     * Result: set to true and the JChannel will try to reconnect when it is being closed<BR>
     *<BR>
     * Option: AUTO_GETSTATE<BR>
     * Value:  java.lang.Boolean<BR>
     * Result: set to true, the AUTO_RECONNECT will be set to true and the JChannel will try to get the state after a close and reconnect happens<BR>
     * <BR>
     * Note: VIEW, SUSPECT and GET_STATE_EVENTS are deprecated (always on) and ignored;
     * a value that is not a Boolean is rejected with an error log, not an exception.
     *
     * @param option the parameter option Channel.VIEW, Channel.SUSPECT, etc
     * @param value the value to set for this option
     *
     */
    public void setOpt(int option, Object value) {
        if(closed) {
            if(log.isWarnEnabled()) log.warn("channel is closed; option not set !");
            return;
        }
        switch(option) {
            case VIEW:
                if(log.isWarnEnabled())
                    log.warn("option VIEW has been deprecated (it is always true now); this option is ignored");
                break;
            case SUSPECT:
                if(log.isWarnEnabled())
                    log.warn("option SUSPECT has been deprecated (it is always true now); this option is ignored");
                break;
            case BLOCK:
                if(value instanceof Boolean)
                    receive_blocks=((Boolean)value).booleanValue();
                else
                    if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                                                       " (" + value + "): value has to be Boolean");
                break;
            case GET_STATE_EVENTS:
                if(log.isWarnEnabled())
                    log.warn("option GET_STATE_EVENTS has been deprecated (it is always true now); this option is ignored");
                break;
            case LOCAL:
                if(value instanceof Boolean)
                    receive_local_msgs=((Boolean)value).booleanValue();
                else
                    if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                                                       " (" + value + "): value has to be Boolean");
                break;
            case AUTO_RECONNECT:
                if(value instanceof Boolean)
                    auto_reconnect=((Boolean)value).booleanValue();
                else
                    if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                                                       " (" + value + "): value has to be Boolean");
                break;
            case AUTO_GETSTATE:
                if(value instanceof Boolean) {
                    auto_getstate=((Boolean)value).booleanValue();
                    // AUTO_GETSTATE implies AUTO_RECONNECT (state fetch needs a reconnect first)
                    if(auto_getstate)
                        auto_reconnect=true;
                }
                else
                    if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) +
                                                       " (" + value + "): value has to be Boolean");
                break;
            default:
                if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
                break;
        }
    }
/**
* returns the value of an option.
* @param option the option you want to see the value for
* @return the object value, in most cases java.lang.Boolean
* @see JChannel#setOpt
*/
public Object getOpt(int option) {
switch(option) {
case VIEW:
return Boolean.TRUE;
case BLOCK:
return receive_blocks ? Boolean.TRUE : Boolean.FALSE;
case SUSPECT:
return Boolean.TRUE;
case AUTO_RECONNECT:
return auto_reconnect ? Boolean.TRUE : Boolean.FALSE;
case AUTO_GETSTATE:
return auto_getstate ? Boolean.TRUE : Boolean.FALSE;
case GET_STATE_EVENTS:
return Boolean.TRUE;
case LOCAL:
return receive_local_msgs ? Boolean.TRUE : Boolean.FALSE;
default:
if(log.isErrorEnabled()) log.error("option " + Channel.option2String(option) + " not known");
return null;
}
}
    /**
     * Called to acknowledge a block() (callback in <code>MembershipListener</code> or
     * <code>BlockEvent</code> received from call to <code>receive()</code>).
     * After sending blockOk(), no messages should be sent until a new view has been received.
     * Calling this method on a closed channel has no effect.
     */
    public void blockOk() {
        // intentionally a no-op in this implementation
    }
    /**
     * Retrieves the current group state. Sends GET_STATE event down to STATE_TRANSFER layer.
     * Blocks until STATE_TRANSFER sends up a GET_STATE_OK event or until <code>timeout</code>
     * milliseconds have elapsed. The argument of GET_STATE_OK should be a single object.
     * @param target the target member to receive the state from. if null, state is retrieved from coordinator
     * @param timeout the number of milliseconds to wait for the operation to complete successfully. 0 waits until
     * the state has been received
     * @return true of the state was received, false if the operation timed out
     */
    public boolean getState(Address target, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
        // null state_id means: transfer the full state, not a substate
        return getState(target,null,timeout);
    }
    /**
     * Retrieves a substate (or partial state) from the target.
     * @param target State provider. If null, coordinator is used
     * @param state_id The ID of the substate. If null, the entire state will be transferred
     * @param timeout the number of milliseconds to wait for the operation to complete successfully. 0 waits until
     * the state has been received
     * @return true if the state was received, false if the operation timed out
     * @throws ChannelNotConnectedException
     * @throws ChannelClosedException
     */
    public boolean getState(Address target, String state_id, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
        // true = start/stop a flush around the transfer if FLUSH is in the stack
        return getState(target, state_id, timeout, true);
    }
    /**
     * Retrieves a substate (or partial state) from the target.
     * @param target State provider. If null, coordinator is used
     * @param state_id The ID of the substate. If null, the entire state will be transferred
     * @param timeout the number of milliseconds to wait for the operation to complete successfully. 0 waits until
     * the state has been received
     * @param useFlushIfPresent whether to run a flush around the transfer when FLUSH is in the stack
     * @return true if the state was received, false if the operation timed out
     * @throws ChannelNotConnectedException
     * @throws ChannelClosedException
     * @throws IllegalStateException if no STATE_TRANSFER protocol is configured in the stack
     */
    public boolean getState(Address target, String state_id, long timeout,boolean useFlushIfPresent) throws ChannelNotConnectedException, ChannelClosedException {
        checkClosedOrNotConnected();
        if(!state_transfer_supported) {
            throw new IllegalStateException("fetching state will fail as state transfer is not supported. "
                    + "Add one of the STATE_TRANSFER protocols to your protocol configuration");
        }
        if(target == null)
            target=determineCoordinator();
        if(target != null && local_addr != null && target.equals(local_addr)) {
            // fetching state from oneself is meaningless; we are probably the first member
            if(log.isTraceEnabled())
                log.trace("cannot get state from myself (" + target + "): probably the first member");
            return false;
        }
        StateTransferInfo state_info=new StateTransferInfo(target, state_id, timeout);
        boolean initiateFlush = flush_supported && useFlushIfPresent;
        if(initiateFlush)
            startFlush(false);
        // reset the promise BEFORE sending GET_STATE so the result cannot be missed
        state_promise.reset();
        down(new Event(Event.GET_STATE, state_info));
        Boolean b=state_promise.getResult(state_info.timeout);
        if(initiateFlush)
            stopFlush();
        boolean state_transfer_successfull = b != null && b.booleanValue();
        if(!state_transfer_successfull)
            down(new Event(Event.RESUME_STABLE)); // re-enable STABLE message garbage collection on failure
        return state_transfer_successfull;
    }
    /**
     * Retrieves the current group state. Sends GET_STATE event down to STATE_TRANSFER layer.
     * Blocks until STATE_TRANSFER sends up a GET_STATE_OK event or until <code>timeout</code>
     * milliseconds have elapsed. The argument of GET_STATE_OK should be a vector of objects.
     * @param targets - the target members to receive the state from ( an Address list )
     * @param timeout - the number of milliseconds to wait for the operation to complete successfully
     * @return true of the state was received, false if the operation timed out
     * @deprecated Not really needed - we always want to get the state from a single member,
     * use {@link #getState(org.jgroups.Address, long)} instead
     * @throws UnsupportedOperationException always; this operation is no longer supported
     */
    public boolean getAllStates(Vector targets, long timeout) throws ChannelNotConnectedException, ChannelClosedException {
        throw new UnsupportedOperationException("use getState() instead");
    }
/**
* Called by the application is response to receiving a <code>getState()</code> object when
* calling <code>receive()</code>.
* When the application receives a getState() message on the receive() method,
* it should call returnState() to reply with the state of the application
* @param state The state of the application as a byte buffer
* (to send over the network).
*/
public void returnState(byte[] state) {
try {
StateTransferInfo state_info=new StateTransferInfo(null, null, 0L, state);
applstate_exchanger.exchange(state_info);
}
catch(InterruptedException e) {
Thread.currentThread().interrupt();
}
}
    /**
     * Returns a substate as indicated by state_id, handing it to the stack thread
     * blocked in up(GET_APPLSTATE) via the exchanger.
     * @param state the application state (or substate) as a byte buffer
     * @param state_id the id of the substate; null means the entire state
     */
    public void returnState(byte[] state, String state_id) {
        try {
            StateTransferInfo state_info=new StateTransferInfo(null, state_id, 0L, state);
            applstate_exchanger.exchange(state_info);
        }
        catch(InterruptedException e) {
            // restore the interrupt status so callers can observe it
            Thread.currentThread().interrupt();
        }
    }
    /**
     * Callback method <BR>
     * Called by the ProtocolStack when a message is received.
     * It will be added to the message queue from which subsequent
     * <code>Receive</code>s will dequeue it.
     * <p>
     * Dispatch order: (1) pre-processing of stats/flags/promises per event type,
     * (2) if an UpHandler is installed, everything is delegated to it,
     * (3) otherwise events are delivered to the installed Receiver,
     * (4) events not consumed by a Receiver are queued on mq for pull-style receive().
     * @param evt the event carrying the message from the protocol stack
     */
    public Object up(Event evt) {
        int type=evt.getType();
        Message msg;

        /*if the queue is not available, there is no point in
         *processing the message at all*/
        switch(type) {

        case Event.MSG:
            msg=(Message)evt.getArg();
            if(stats) {
                received_msgs++;
                received_bytes+=msg.getLength();
            }
            if(!receive_local_msgs) {  // discard local messages (sent by myself to me)
                if(local_addr != null && msg.getSrc() != null)
                    if(local_addr.equals(msg.getSrc()))
                        return null;
            }
            break;

        case Event.VIEW_CHANGE:
            View tmp=(View)evt.getArg();
            // a MergeView is flattened into a plain View before being installed locally
            if(tmp instanceof MergeView)
                my_view=new View(tmp.getVid(), tmp.getMembers());
            else
                my_view=tmp;

            /*
             * Bela&Vladimir Oct 27th,2006 (JGroups 2.4)- we need to switch to
             * connected=true because client can invoke channel.getView() in
             * viewAccepted() callback invoked on this thread
             * (see Event.VIEW_CHANGE handling below)
             */

            // not good: we are only connected when we returned from connect() - bela June 22 2007
            // if(connected == false) {
            //   connected=true;
            // }
            break;

        case Event.CONFIG:
            // protocols announce their capabilities (state transfer, flush) via CONFIG
            Map<String,Object> config=(Map<String,Object>)evt.getArg();
            if(config != null) {
                if(config.containsKey("state_transfer")) {
                    state_transfer_supported=((Boolean)config.get("state_transfer")).booleanValue();
                }
                if(config.containsKey("flush_supported")) {
                    flush_supported=((Boolean)config.get("flush_supported")).booleanValue();
                }
            }
            break;

        case Event.INFO:
            Map<String, Object> m = (Map<String, Object>) evt.getArg();
            info.putAll(m);
            break;

        case Event.GET_STATE_OK:
            StateTransferInfo state_info=(StateTransferInfo)evt.getArg();
            byte[] state=state_info.state;

            // unblocks the thread waiting in getState()
            state_promise.setResult(state != null? Boolean.TRUE : Boolean.FALSE);
            if(up_handler != null) {
                return up_handler.up(evt);
            }

            if(state != null) {
                String state_id=state_info.state_id;
                if(receiver != null) {
                    try {
                        if(receiver instanceof ExtendedReceiver && state_id!=null)
                            ((ExtendedReceiver)receiver).setState(state_id, state);
                        else
                            receiver.setState(state);
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling setState() in receiver", t);
                    }
                }
                else {
                    // no receiver: queue for pull-style consumption; failures are best-effort
                    try {mq.add(new Event(Event.STATE_RECEIVED, state_info));} catch(Exception e) {}
                }
            }
            break;

        case Event.STATE_TRANSFER_INPUTSTREAM_CLOSED:
            state_promise.setResult(Boolean.TRUE);
            break;

        case Event.STATE_TRANSFER_INPUTSTREAM:
            StateTransferInfo sti=(StateTransferInfo)evt.getArg();
            InputStream is=sti.inputStream;
            //Oct 13,2006 moved to down() when Event.STATE_TRANSFER_INPUTSTREAM_CLOSED is received
            //state_promise.setResult(is != null? Boolean.TRUE : Boolean.FALSE);

            if(up_handler != null) {
                return up_handler.up(evt);
            }

            if(is != null) {
                if(receiver instanceof ExtendedReceiver) {
                    try {
                        if(sti.state_id == null)
                            ((ExtendedReceiver)receiver).setState(is);
                        else
                            ((ExtendedReceiver)receiver).setState(sti.state_id, is);
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling setState() in receiver", t);
                    }
                }
                else if(receiver instanceof Receiver){
                    // plain Receiver cannot handle streaming state; close the stream to avoid a leak
                    if(log.isWarnEnabled()){
                        log.warn("Channel has STREAMING_STATE_TRANSFER, however," +
                                " application does not implement ExtendedMessageListener. State is not transfered");
                        Util.close(is);
                    }
                }
                else {
                    try {
                        mq.add(new Event(Event.STATE_TRANSFER_INPUTSTREAM, sti));
                    }
                    catch(Exception e) {
                    }
                }
            }
            break;

        case Event.SET_LOCAL_ADDRESS:
            // unblocks startStack(), which waits on this promise for the local address
            local_addr_promise.setResult((Address)evt.getArg());
            break;

        case Event.EXIT:
            handleExit(evt);
            return null;  // no need to pass event up; already done in handleExit()

        default:
            break;
        }


        // If UpHandler is installed, pass all events to it and return (UpHandler is e.g. a building block)
        if(up_handler != null) {
            Object ret=up_handler.up(evt);

            if(type == Event.UNBLOCK){
                flush_unblock_promise.setResult(Boolean.TRUE);
            }
            return ret;
        }

        // Receiver (push-style) delivery; each callback is shielded so a misbehaving
        // application cannot kill the stack thread
        switch(type) {
            case Event.MSG:
                if(receiver != null) {
                    try {
                        receiver.receive((Message)evt.getArg());
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling receive() in receiver", t);
                    }
                    return null;
                }
                break;
            case Event.VIEW_CHANGE:
                if(receiver != null) {
                    try {
                        receiver.viewAccepted((View)evt.getArg());
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling viewAccepted() in receiver", t);
                    }
                    return null;
                }
                break;
            case Event.SUSPECT:
                if(receiver != null) {
                    try {
                        receiver.suspect((Address)evt.getArg());
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling suspect() in receiver", t);
                    }
                    return null;
                }
                break;
            case Event.GET_APPLSTATE:
                if(receiver != null) {
                    StateTransferInfo state_info=(StateTransferInfo)evt.getArg();
                    byte[] tmp_state=null;
                    String state_id=state_info.state_id;
                    try {
                        if(receiver instanceof ExtendedReceiver && state_id!=null) {
                            tmp_state=((ExtendedReceiver)receiver).getState(state_id);
                        }
                        else {
                            tmp_state=receiver.getState();
                        }
                    }
                    catch(Throwable t) {
                        if(log.isWarnEnabled())
                            log.warn("failed calling getState() in receiver", t);
                    }
                    return new StateTransferInfo(null, state_id, 0L, tmp_state);
                }
                break;
            case Event.STATE_TRANSFER_OUTPUTSTREAM:
                StateTransferInfo sti=(StateTransferInfo)evt.getArg();
                OutputStream os=sti.outputStream;
                if(receiver instanceof ExtendedReceiver) {
                    if(os != null) {
                        try {
                            if(sti.state_id == null)
                                ((ExtendedReceiver)receiver).getState(os);
                            else
                                ((ExtendedReceiver)receiver).getState(sti.state_id, os);
                        }
                        catch(Throwable t) {
                            if(log.isWarnEnabled())
                                log.warn("failed calling getState() in receiver", t);
                        }
                    }
                }
                else if(receiver instanceof Receiver){
                    if(log.isWarnEnabled()){
                        log.warn("Channel has STREAMING_STATE_TRANSFER, however," +
                                " application does not implement ExtendedMessageListener. State is not transfered");
                        Util.close(os);
                    }
                }
                break;

            case Event.BLOCK:
                if(!receive_blocks) {  // discard if client has not set 'receiving blocks' to 'on'
                    return Boolean.TRUE;
                }

                if(receiver != null) {
                    try {
                        receiver.block();
                    }
                    catch(Throwable t) {
                        if(log.isErrorEnabled())
                            log.error("failed calling block() in receiver", t);
                    }
                    return Boolean.TRUE;
                }
                break;
            case Event.UNBLOCK:
                //invoke receiver if block receiving is on
                if(receive_blocks && receiver instanceof ExtendedReceiver) {
                    try {
                        ((ExtendedReceiver)receiver).unblock();
                    }
                    catch(Throwable t) {
                        if(log.isErrorEnabled())
                            log.error("failed calling unblock() in receiver", t);
                    }
                }
                //flip promise
                flush_unblock_promise.setResult(Boolean.TRUE);
                return null;
            default:
                break;
        }

        // pull-style fallback: queue the event for receive()/peek() consumers
        if(type == Event.MSG || type == Event.VIEW_CHANGE || type == Event.SUSPECT ||
                type == Event.GET_APPLSTATE || type== Event.STATE_TRANSFER_OUTPUTSTREAM
                || type == Event.BLOCK || type == Event.UNBLOCK) {
            try {
                mq.add(evt);
            }
            catch(QueueClosedException queue_closed) {
                ; // ignore
            }
            catch(Exception e) {
                if(log.isWarnEnabled()) log.warn("exception adding event " + evt + " to message queue", e);
            }
        }

        // wait for the application to answer via returnState() (exchanger rendezvous)
        if(type == Event.GET_APPLSTATE) {
            try {
                return applstate_exchanger.exchange(null);
            }
            catch(InterruptedException e) {
                Thread.currentThread().interrupt();
                return null;
            }
        }
        return null;
    }
    /**
     * Sends a message through the protocol stack if the stack is available.
     * NOTE(review): the CONFIG pre-processing here is duplicated in {@link #downcall(Event)};
     * the two differ only in that downcall() returns the stack's result.
     * @param evt the message to send down, encapsulated in an event
     */
    public void down(Event evt) {
        if(evt == null) return;

        switch(evt.getType()) {
            case Event.CONFIG:
                try {
                    Map<String,Object> m=(Map<String,Object>)evt.getArg();
                    if(m != null) {
                        additional_data.putAll(m);
                        // "additional_data" is piggybacked on the local IpAddress if possible
                        if(m.containsKey("additional_data")) {
                            byte[] tmp=(byte[])m.get("additional_data");
                            if(local_addr instanceof IpAddress)
                                ((IpAddress)local_addr).setAdditionalData(tmp);
                        }
                    }
                }
                catch(Throwable t) {
                    if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t);
                }
                break;
        }

        prot_stack.down(evt);
    }
    /**
     * Sends an event down the protocol stack and returns the stack's result
     * (same CONFIG pre-processing as {@link #down(Event)}, but with a return value).
     * @param evt the event to send down; null is a no-op returning null
     * @return the value returned by the protocol stack for this event
     */
    public Object downcall(Event evt) {
        if(evt == null) return null;

        switch(evt.getType()) {
            case Event.CONFIG:
                try {
                    Map<String,Object> m=(Map<String,Object>)evt.getArg();
                    if(m != null) {
                        additional_data.putAll(m);
                        // "additional_data" is piggybacked on the local IpAddress if possible
                        if(m.containsKey("additional_data")) {
                            byte[] tmp=(byte[])m.get("additional_data");
                            if(local_addr instanceof IpAddress)
                                ((IpAddress)local_addr).setAdditionalData(tmp);
                        }
                    }
                }
                catch(Throwable t) {
                    if(log.isErrorEnabled()) log.error("CONFIG event did not contain a hashmap: " + t);
                }
                break;
        }

        return prot_stack.down(evt);
    }
    /**
     * Returns a multi-line string describing the channel's state: local address,
     * cluster name, view and connected/closed flags, plus the receive queue size.
     * @param details if true, option flags and the protocol stack configuration are included as well
     */
    public String toString(boolean details) {
        StringBuilder sb=new StringBuilder();
        sb.append("local_addr=").append(local_addr).append('\n');
        sb.append("cluster_name=").append(cluster_name).append('\n');
        sb.append("my_view=").append(my_view).append('\n');
        sb.append("connected=").append(connected).append('\n');
        sb.append("closed=").append(closed).append('\n');
        if(mq != null)
            sb.append("incoming queue size=").append(mq.size()).append('\n');
        if(details) {
            sb.append("receive_blocks=").append(receive_blocks).append('\n');
            sb.append("receive_local_msgs=").append(receive_local_msgs).append('\n');
            sb.append("auto_reconnect=").append(auto_reconnect).append('\n');
            sb.append("auto_getstate=").append(auto_getstate).append('\n');
            sb.append("state_transfer_supported=").append(state_transfer_supported).append('\n');
            sb.append("props=").append(props).append('\n');
        }

        return sb.toString();
    }
/* ----------------------------------- Private Methods ------------------------------------- */
    /**
     * One-time initialization from a stack configurator: resolves system-property
     * placeholders, records the configuration string, then creates and sets up the
     * protocol stack.
     * @param configurator provides the protocol stack configuration
     * @throws ChannelException if the stack cannot be set up
     */
    protected final void init(ProtocolStackConfigurator configurator) throws ChannelException {
        if(log.isInfoEnabled())
            log.info("JGroups version: " + Version.description);
        ConfiguratorFactory.substituteVariables(configurator); // replace vars with system props
        props=configurator.getProtocolStackString();
        prot_stack=new ProtocolStack(this, props);
        try {
            prot_stack.setup(); // Setup protocol stack (creates protocol, calls init() on them)
        }
        catch(Throwable e) {
            throw new ChannelException("unable to setup the protocol stack", e);
        }
    }
/**
* Initializes all variables. Used after <tt>close()</tt> or <tt>disconnect()</tt>,
* to be ready for new <tt>connect()</tt>
*/
private void init() {
local_addr=null;
cluster_name=null;
my_view=null;
// changed by Bela Sept 25 2003
//if(mq != null && mq.closed())
// mq.reset();
connected=false;
}
    /**
     * Starts the protocol stack for the given cluster, waits for the local address
     * to be assigned (via SET_LOCAL_ADDRESS) and installs a temporary single-member view.
     * No-op if already connected; throws if the channel is closed or the stack fails to start.
     * @param cluster_name the cluster to start the stack for; null means a unicast channel
     * @throws ChannelClosedException if the channel is closed
     * @throws ChannelException if the stack fails to start or no local address is assigned in time
     */
    private void startStack(String cluster_name) throws ChannelException {
        /*make sure the channel is not closed*/
        checkClosed();

        /*if we already are connected, then ignore this*/
        if(connected) {
            if(log.isTraceEnabled()) log.trace("already connected to " + cluster_name);
            return;
        }

        /*make sure we have a valid channel name*/
        if(cluster_name == null) {
            if(log.isDebugEnabled()) log.debug("cluster_name is null, assuming unicast channel");
        }
        else
            this.cluster_name=cluster_name;

        try {
            prot_stack.startStack(cluster_name); // calls start() in all protocols, from top to bottom
        }
        catch(Throwable e) {
            throw new ChannelException("failed to start protocol stack", e);
        }

        // the timeout is configurable via system property; defaults to 30s
        String tmp=Util.getProperty(new String[]{Global.CHANNEL_LOCAL_ADDR_TIMEOUT, "local_addr.timeout"},
                                    null, null, false, "30000");
        LOCAL_ADDR_TIMEOUT=Long.parseLong(tmp);
        /* Wait LOCAL_ADDR_TIMEOUT milliseconds for local_addr to have a non-null value (set by SET_LOCAL_ADDRESS) */
        local_addr=local_addr_promise.getResult(LOCAL_ADDR_TIMEOUT);
        if(local_addr == null) {
            log.fatal("local_addr is null; cannot connect");
            throw new ChannelException("local_addr is null");
        }

        /*create a temporary view, assume this channel is the only member and is the coordinator*/
        Vector t=new Vector(1);
        t.addElement(local_addr);
        my_view=new View(local_addr, 0, t);  // create a dummy view
    }
    /**
     * Health check.<BR>
     * @throws ChannelClosedException if the channel is closed
     */
    protected void checkClosed() throws ChannelClosedException {
        if(closed)
            throw new ChannelClosedException();
    }
    /**
     * Health check: verifies the channel is both open and connected.
     * @throws ChannelClosedException if the channel is closed (checked first)
     * @throws ChannelNotConnectedException if the channel is open but not connected
     */
    protected void checkClosedOrNotConnected() throws ChannelNotConnectedException, ChannelClosedException {
        if(closed)
            throw new ChannelClosedException();
        if(!connected)
            throw new ChannelNotConnectedException();
    }
    /**
     * Returns the value of the event.<BR>
     * These objects will be returned<BR>
     * <PRE>
     * <B>Event Type    - Return Type</B>
     * Event.MSG           - returns a Message object
     * Event.VIEW_CHANGE   - returns a View object
     * Event.SUSPECT       - returns a SuspectEvent object
     * Event.BLOCK         - returns a new BlockEvent object
     * Event.GET_APPLSTATE - returns a GetStateEvent object
     * Event.STATE_RECEIVED- returns a SetStateEvent object
     * Event.Exit          - returns an ExitEvent object
     * All other           - return the actual Event object
     * </PRE>
     * @param evt - the event of which you want to extract the value
     * @return the event value if it matches the select list,
     *         returns null if the event is null
     *         returns the event itself if a match (See above) can not be made of the event type
     */
    static Object getEvent(Event evt) {
        if(evt == null)
            return null; // correct ?

        switch(evt.getType()) {
            case Event.MSG:
                return evt.getArg();
            case Event.VIEW_CHANGE:
                return evt.getArg();
            case Event.SUSPECT:
                return new SuspectEvent(evt.getArg());
            case Event.BLOCK:
                return new BlockEvent();
            case Event.UNBLOCK:
                return new UnblockEvent();
            case Event.GET_APPLSTATE:
                StateTransferInfo info=(StateTransferInfo)evt.getArg();
                return new GetStateEvent(info.target, info.state_id);
            case Event.STATE_RECEIVED:
                info=(StateTransferInfo)evt.getArg();
                return new SetStateEvent(info.state, info.state_id);
            case Event.STATE_TRANSFER_OUTPUTSTREAM:
                info = (StateTransferInfo)evt.getArg();
                return new StreamingGetStateEvent(info.outputStream,info.state_id);
            case Event.STATE_TRANSFER_INPUTSTREAM:
                info = (StateTransferInfo)evt.getArg();
                return new StreamingSetStateEvent(info.inputStream,info.state_id);
            case Event.EXIT:
                return new ExitEvent();
            default:
                // pass-through for event types with no dedicated wrapper
                return evt;
        }
    }
/**
 * Disconnects and closes the channel.
 * This method does the following things
 * <ol>
 * <li>Calls <code>this.disconnect</code> if the disconnect parameter is true
 * <li>Calls <code>Queue.close</code> on mq if the close_mq parameter is true
 * <li>Calls <code>ProtocolStack.stop</code> on the protocol stack
 * <li>Calls <code>ProtocolStack.destroy</code> on the protocol stack
 * <li>Sets the channel closed and channel connected flags to true and false
 * <li>Notifies any channel listener of the channel close operation
 * </ol>
 *
 * @param disconnect whether to leave the group before closing
 * @param close_mq   whether to close the message queue (entries are not flushed)
 */
protected void _close(boolean disconnect, boolean close_mq) {
    if(closed)
        return; // already closed, nothing to do
    if(disconnect)
        disconnect(); // leave group if connected
    if(close_mq)
        closeMessageQueue(false);
    // stop and destroy the protocol stack, then flip the state flags
    stopStack(true, true);
    closed=true;
    connected=false;
    notifyChannelClosed(this);
    init(); // sets local_addr=null; changed March 18 2003 (bela) -- prevented successful rejoining
}
/**
 * Stops and/or destroys the protocol stack. Failures are logged but never
 * propagated to the caller.
 *
 * @param disconnect whether to stop the stack (stopStack on the cluster name)
 * @param destroy    whether to destroy the stack afterwards
 */
protected void stopStack(boolean disconnect, boolean destroy) {
    if(prot_stack == null) {
        return; // no stack to tear down
    }
    try {
        if(disconnect) {
            prot_stack.stopStack(cluster_name);
        }
        if(destroy) {
            prot_stack.destroy();
        }
    }
    catch(Exception e) {
        if(log.isErrorEnabled()) {
            log.error("failed destroying the protocol stack", e);
        }
    }
}
/**
 * Closes the internal message queue, if one exists.
 *
 * @param flush_entries whether pending entries are flushed before closing
 */
public final void closeMessageQueue(boolean flush_entries) {
    if(mq == null) {
        return;
    }
    mq.close(flush_entries);
}
/**
 * Creates a separate thread to close the protocol stack.
 * This is needed because the thread that called JChannel.up() with the EXIT event would
 * hang waiting for up() to return, while up() actually tries to kill that very thread.
 * This way, we return immediately and allow the thread to terminate.
 */
private void handleExit(Event evt) {
    notifyChannelShunned();
    // drop a reference to a closer thread that has already finished
    if(closer != null && !closer.isAlive()) {
        closer=null;
    }
    if(closer != null) {
        return; // a closer thread is already running
    }
    if(log.isDebugEnabled()) {
        log.debug("received an EXIT event, will leave the channel");
    }
    closer=new CloserThread(evt);
    closer.start();
}
/**
 * Returns whether flush is supported by this channel, i.e. whether the
 * pbcast.FLUSH protocol is part of the stack configuration.
 */
public boolean flushSupported() {
    return flush_supported;
}
/**
 * Performs a flush of the system: all pending messages are flushed out and
 * every member acks their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * On flush collisions a random sleep-time backoff algorithm is employed and
 * the flush is reattempted for numberOfAttempts, so this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 *
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 * @throws IllegalStateException if pbcast.FLUSH is not in the configuration
 */
public boolean startFlush(boolean automatic_resume) {
    if(!flush_supported) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    Boolean rc=(Boolean)downcall(new Event(Event.SUSPEND));
    if(automatic_resume) {
        stopFlush();
    }
    return rc;
}
/**
 * Performs a partial flush in a cluster for flush participants.
 * <p>
 * All pending messages are flushed out only for flush participants; the
 * remaining members of the cluster are not included. Flush participants
 * should be a proper subset of the current view.
 * <p>
 * On flush collisions a random sleep-time backoff algorithm is employed and
 * the flush is reattempted for numberOfAttempts, so this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 *
 * @param flushParticipants the members to include in the flush
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 * @throws IllegalStateException    if pbcast.FLUSH is not in the configuration
 * @throws IllegalArgumentException if the current view does not contain all participants
 */
public boolean startFlush(List<Address> flushParticipants,boolean automatic_resume) {
    if(!flush_supported) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    View v = getView();
    // every requested participant must be a member of the current view
    if(v == null || !v.getMembers().containsAll(flushParticipants)) {
        throw new IllegalArgumentException("Current view " + v
                                           + " does not contain all flush participants "
                                           + flushParticipants);
    }
    boolean successfulFlush = (Boolean) downcall(new Event(Event.SUSPEND, flushParticipants));
    if(automatic_resume) {
        stopFlush(flushParticipants);
    }
    return successfulFlush;
}
/**
 * Will perform a flush of the system, ie. all pending messages are flushed out of the
 * system and all members ack their reception. After this call returns, no member will
 * be sending any messages until {@link #stopFlush()} is called.
 * <p>
 * In case of flush collisions, random sleep time backoff algorithm is employed and
 * flush is reattempted for numberOfAttempts. Therefore this method is guaranteed
 * to return after timeout x numberOfAttempts miliseconds.
 * @param timeout NOTE(review): currently ignored -- this overload simply delegates
 *                to {@link #startFlush(boolean)}; confirm whether the timeout is
 *                meant to be passed down the stack
 * @param automatic_resume Call {@link #stopFlush()} after the flush
 * @return true if FLUSH completed within the timeout
 */
public boolean startFlush(long timeout, boolean automatic_resume) {
    return startFlush(automatic_resume);
}
/**
 * Resumes message sending after a flush by passing a RESUME event down the
 * stack, then blocks (bounded by FLUSH_UNBLOCK_TIMEOUT) until the matching
 * UNBLOCK event has been received.
 *
 * @throws IllegalStateException if pbcast.FLUSH is not in the configuration
 */
public void stopFlush() {
    if(!flush_supported) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    flush_unblock_promise.reset();
    down(new Event(Event.RESUME));
    // do not return until UNBLOCK event is received, or the timeout elapses
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException te) {
        log.warn("Timeout waiting for UNBLOCK event at " + getLocalAddress());
    }
}
/**
 * Resumes message sending for the given flush participants after a partial
 * flush, then blocks (bounded by FLUSH_UNBLOCK_TIMEOUT) until the matching
 * UNBLOCK event has been received.
 *
 * @param flushParticipants the members for which the flush is resumed
 * @throws IllegalStateException if pbcast.FLUSH is not in the configuration
 */
public void stopFlush(List<Address> flushParticipants) {
    if(!flush_supported) {
        throw new IllegalStateException("Flush is not supported, add pbcast.FLUSH protocol to your configuration");
    }
    flush_unblock_promise.reset();
    down(new Event(Event.RESUME, flushParticipants));
    // do not return until UNBLOCK event is received, or the timeout elapses
    try {
        flush_unblock_promise.getResultWithTimeout(FLUSH_UNBLOCK_TIMEOUT);
    }
    catch(TimeoutException te) {
        log.warn("Timeout waiting for UNBLOCK event at " + getLocalAddress());
    }
}
/**
 * Returns a copy of this channel's info map; modifying the returned map does
 * not affect the channel's own state.
 */
@Override
public Map<String, Object> getInfo(){
    Map<String, Object> copy=new HashMap<String, Object>(info);
    return copy;
}
/**
 * Stores a key/value pair in this channel's info map. A null key is ignored.
 *
 * @param key   the entry key; ignored when null
 * @param value the value to associate with the key
 */
public void setInfo(String key, Object value) {
    if(key == null) {
        return;
    }
    info.put(key, value);
}
/**
 * Determines the coordinator, i.e. the first member of the current view.
 *
 * @return the first member of my_view, or null if there is no view or it is empty
 */
Address determineCoordinator() {
    Vector mbrs=my_view != null? my_view.getMembers() : null;
    return (mbrs == null || mbrs.isEmpty())? null : (Address)mbrs.firstElement();
}
/* ------------------------------- End of Private Methods ---------------------------------- */
/**
 * Closes the channel on a separate daemon thread, then optionally reconnects
 * (auto_reconnect) and re-fetches the group state (auto_getstate). A separate
 * thread is required because the thread delivering the EXIT event would hang
 * waiting for up() to return while up() tries to kill that very thread.
 *
 * Fix: removed the unused, always-null field "final Thread t" -- it was never
 * read or written anywhere.
 */
class CloserThread extends Thread {
    final Event evt; // the EXIT event, forwarded to the application after closing

    CloserThread(Event evt) {
        super(Util.getGlobalThreadGroup(), "CloserThread");
        this.evt=evt;
        setDaemon(true);
    }

    public void run() {
        try {
            String old_cluster_name=cluster_name; // remember because close() will null it
            if(log.isDebugEnabled())
                log.debug("closing the channel");
            _close(false, false); // do not disconnect before closing channel, do not close mq (yet !)

            // hand the EXIT event to the application: via the UpHandler if one
            // is installed, otherwise via the message queue (unless a Receiver is set)
            if(up_handler != null)
                up_handler.up(this.evt);
            else {
                try {
                    if(receiver == null)
                        mq.add(this.evt);
                }
                catch(Exception ex) {
                    if(log.isErrorEnabled()) log.error("exception: " + ex);
                }
            }

            if(mq != null) {
                Util.sleep(500); // give the mq thread a bit of time to deliver EXIT to the application
                try {
                    mq.close(false);
                }
                catch(Exception ex) {
                }
            }

            if(auto_reconnect) {
                try {
                    if(log.isDebugEnabled()) log.debug("reconnecting to group " + old_cluster_name);
                    open();
                    if(additional_data != null) {
                        // send previously set additional_data down the stack - other protocols (e.g. TP) use it
                        Map<String,Object> m=new HashMap<String,Object>(additional_data);
                        down(new Event(Event.CONFIG, m));
                    }
                }
                catch(Exception ex) {
                    if(log.isErrorEnabled()) log.error("failure reopening channel: " + ex);
                    return;
                }
                // retry until reconnected, sleeping between attempts
                while(!connected) {
                    try {
                        connect(old_cluster_name);
                        notifyChannelReconnected(local_addr);
                    }
                    catch(Exception ex) {
                        if(log.isErrorEnabled()) log.error("failure reconnecting to channel, retrying", ex);
                        Util.sleep(1000); // sleep 1 sec between reconnect attempts
                    }
                }
            }

            if(auto_getstate) {
                if(log.isDebugEnabled())
                    log.debug("fetching the state (auto_getstate=true)");
                boolean rc=JChannel.this.getState(null, GET_STATE_DEFAULT_TIMEOUT);
                if(log.isDebugEnabled()) {
                    if(rc)
                        log.debug("state was retrieved successfully");
                    else
                        log.debug("state transfer failed");
                }
            }
        }
        catch(Exception ex) {
            if(log.isErrorEnabled()) log.error("exception: " + ex);
        }
        finally {
            closer=null; // allow a future EXIT event to spawn a new closer
        }
    }
}
}
|
reformatted to get rid of Vlad's weird formatting :-)
|
src/org/jgroups/JChannel.java
|
reformatted to get rid of Vlad's weird formatting :-)
|
|
Java
|
apache-2.0
|
752d13e87353c5154fac8dcc78ed8a32ae40bb36
| 0
|
gcoders/gerrit,joshuawilson/merrit,gracefullife/gerrit,Distrotech/gerrit,bootstraponline-archive/gerrit-mirror,gcoders/gerrit,WANdisco/gerrit,thesamet/gerrit,Overruler/gerrit,netroby/gerrit,gracefullife/gerrit,hdost/gerrit,hdost/gerrit,GerritCodeReview/gerrit,gracefullife/gerrit,thesamet/gerrit,gcoders/gerrit,gcoders/gerrit,bootstraponline-archive/gerrit-mirror,quyixia/gerrit,Seinlin/gerrit,gcoders/gerrit,midnightradio/gerrit,quyixia/gerrit,Overruler/gerrit,anminhsu/gerrit,joshuawilson/merrit,gerrit-review/gerrit,joshuawilson/merrit,Overruler/gerrit,thesamet/gerrit,Seinlin/gerrit,WANdisco/gerrit,Distrotech/gerrit,joshuawilson/merrit,GerritCodeReview/gerrit,Team-OctOS/host_gerrit,midnightradio/gerrit,Saulis/gerrit,jackminicloud/test,MerritCR/merrit,bpollack/gerrit,dwhipstock/gerrit,WANdisco/gerrit,Distrotech/gerrit,gcoders/gerrit,pkdevbox/gerrit,anminhsu/gerrit,MerritCR/merrit,Seinlin/gerrit,GerritCodeReview/gerrit,thinkernel/gerrit,Seinlin/gerrit,renchaorevee/gerrit,pkdevbox/gerrit,thinkernel/gerrit,qtproject/qtqa-gerrit,bootstraponline-archive/gerrit-mirror,joshuawilson/merrit,hdost/gerrit,Saulis/gerrit,renchaorevee/gerrit,Team-OctOS/host_gerrit,gracefullife/gerrit,qtproject/qtqa-gerrit,thinkernel/gerrit,MerritCR/merrit,qtproject/qtqa-gerrit,joshuawilson/merrit,thesamet/gerrit,supriyantomaftuh/gerrit,quyixia/gerrit,bootstraponline-archive/gerrit-mirror,supriyantomaftuh/gerrit,bpollack/gerrit,thesamet/gerrit,dwhipstock/gerrit,netroby/gerrit,GerritCodeReview/gerrit,Team-OctOS/host_gerrit,Seinlin/gerrit,hdost/gerrit,WANdisco/gerrit,gerrit-review/gerrit,supriyantomaftuh/gerrit,Seinlin/gerrit,TonyChai24/test,anminhsu/gerrit,MerritCR/merrit,pkdevbox/gerrit,jackminicloud/test,renchaorevee/gerrit,gracefullife/gerrit,anminhsu/gerrit,renchaorevee/gerrit,renchaorevee/gerrit,Overruler/gerrit,MerritCR/merrit,Overruler/gerrit,Distrotech/gerrit,thinkernel/gerrit,MerritCR/merrit,supriyantomaftuh/gerrit,jackminicloud/test,renchaorevee/gerrit,thinkernel/gerrit,midnightradio/gerrit,D
istrotech/gerrit,Seinlin/gerrit,GerritCodeReview/gerrit,midnightradio/gerrit,thinkernel/gerrit,GerritCodeReview/gerrit,Distrotech/gerrit,Saulis/gerrit,hdost/gerrit,qtproject/qtqa-gerrit,gerrit-review/gerrit,pkdevbox/gerrit,TonyChai24/test,Saulis/gerrit,TonyChai24/test,qtproject/qtqa-gerrit,Team-OctOS/host_gerrit,Overruler/gerrit,TonyChai24/test,Team-OctOS/host_gerrit,netroby/gerrit,Distrotech/gerrit,gerrit-review/gerrit,Saulis/gerrit,dwhipstock/gerrit,TonyChai24/test,netroby/gerrit,WANdisco/gerrit,TonyChai24/test,WANdisco/gerrit,thinkernel/gerrit,anminhsu/gerrit,netroby/gerrit,dwhipstock/gerrit,thesamet/gerrit,quyixia/gerrit,midnightradio/gerrit,GerritCodeReview/gerrit,supriyantomaftuh/gerrit,bootstraponline-archive/gerrit-mirror,quyixia/gerrit,bpollack/gerrit,quyixia/gerrit,supriyantomaftuh/gerrit,dwhipstock/gerrit,quyixia/gerrit,anminhsu/gerrit,anminhsu/gerrit,bootstraponline-archive/gerrit-mirror,netroby/gerrit,Saulis/gerrit,GerritCodeReview/gerrit,jackminicloud/test,gerrit-review/gerrit,qtproject/qtqa-gerrit,MerritCR/merrit,hdost/gerrit,pkdevbox/gerrit,joshuawilson/merrit,qtproject/qtqa-gerrit,TonyChai24/test,dwhipstock/gerrit,netroby/gerrit,renchaorevee/gerrit,gerrit-review/gerrit,joshuawilson/merrit,jackminicloud/test,gerrit-review/gerrit,bpollack/gerrit,MerritCR/merrit,midnightradio/gerrit,supriyantomaftuh/gerrit,hdost/gerrit,jackminicloud/test,WANdisco/gerrit,jackminicloud/test,gcoders/gerrit,pkdevbox/gerrit,bpollack/gerrit,Team-OctOS/host_gerrit,bpollack/gerrit,dwhipstock/gerrit,thesamet/gerrit,pkdevbox/gerrit,Team-OctOS/host_gerrit
|
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm;
import static com.google.gerrit.lucene.IndexVersionCheck.SCHEMA_VERSIONS;
import static com.google.gerrit.lucene.IndexVersionCheck.gerritIndexConfig;
import static com.google.gerrit.lucene.LuceneChangeIndex.LUCENE_VERSION;
import static com.google.gerrit.server.schema.DataSourceProvider.Context.MULTI_USER;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.extensions.registration.DynamicSet;
import com.google.gerrit.lifecycle.LifecycleManager;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.lucene.LuceneIndexModule;
import com.google.gerrit.pgm.util.SiteProgram;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.cache.CacheRemovalListener;
import com.google.gerrit.server.cache.h2.DefaultCacheFactory;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.MultiProgressMonitor;
import com.google.gerrit.server.git.MultiProgressMonitor.Task;
import com.google.gerrit.server.index.ChangeIndexer;
import com.google.gerrit.server.index.IndexExecutor;
import com.google.gerrit.server.index.IndexModule;
import com.google.gerrit.server.patch.PatchListCacheImpl;
import com.google.gerrit.server.patch.PatchListLoader;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.TypeLiteral;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryCache;
import org.eclipse.jgit.lib.TextProgressMonitor;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Standalone program that rebuilds Gerrit's secondary (Lucene) change index
 * from scratch: the existing on-disk indexes are deleted, every change of
 * every project is re-indexed (one task per project on an executor), and
 * finally the index schema/Lucene versions are recorded in the index config.
 */
public class Reindex extends SiteProgram {
  private static final Logger log = LoggerFactory.getLogger(Reindex.class);

  @Option(name = "--threads", usage = "Number of threads to use for indexing")
  private int threads = Runtime.getRuntime().availableProcessors();

  @Option(name = "--dry-run", usage = "Dry run: don't write anything to index")
  private boolean dryRun;

  @Option(name = "--verbose", usage = "Output debug information for each change")
  private boolean verbose;

  private Injector dbInjector;  // injector for the database layer
  private Injector sysInjector; // child injector adding index/cache/ReviewDb modules
  private SitePaths sitePaths;

  /**
   * Entry point: wires up the injectors, wipes the old index, re-indexes all
   * changes and writes the new index version metadata.
   *
   * @return 0 on success, 1 if indexing of at least one project failed
   */
  @Override
  public int run() throws Exception {
    mustHaveValidSite();
    dbInjector = createDbInjector(MULTI_USER);
    if (!IndexModule.isEnabled(dbInjector)) {
      throw die("Secondary index not enabled");
    }
    LifecycleManager dbManager = new LifecycleManager();
    dbManager.add(dbInjector);
    dbManager.start();

    sitePaths = dbInjector.getInstance(SitePaths.class);

    // Delete before any index may be created depending on this data.
    deleteAll();

    sysInjector = createSysInjector();
    LifecycleManager sysManager = new LifecycleManager();
    sysManager.add(sysInjector);
    sysManager.start();

    int result = indexAll();
    writeVersion();

    sysManager.stop();
    dbManager.stop();
    return result;
  }

  /** Builds the child injector with Lucene, cache and per-thread ReviewDb bindings. */
  private Injector createSysInjector() {
    List<Module> modules = Lists.newArrayList();
    modules.add(PatchListCacheImpl.module());
    modules.add(new LuceneIndexModule(false, threads, dryRun));
    modules.add(new ReviewDbModule());
    modules.add(new AbstractModule() {
      @SuppressWarnings("rawtypes")
      @Override
      protected void configure() {
        // Plugins are not loaded and we're just running through each change
        // once, so don't worry about cache removal.
        bind(new TypeLiteral<DynamicSet<CacheRemovalListener>>() {})
            .toInstance(DynamicSet.<CacheRemovalListener> emptySet());
        install(new DefaultCacheFactory.Module());
      }
    });
    return dbInjector.createChildInjector(modules);
  }

  /**
   * Binds ReviewDb to a thread-local connection (at most one per indexing
   * thread); every opened connection is closed when the lifecycle stops.
   */
  private class ReviewDbModule extends LifecycleModule {
    @Override
    protected void configure() {
      final SchemaFactory<ReviewDb> schema = dbInjector.getInstance(
          Key.get(new TypeLiteral<SchemaFactory<ReviewDb>>() {}));
      final List<ReviewDb> dbs = Collections.synchronizedList(
          Lists.<ReviewDb> newArrayListWithCapacity(threads + 1));
      final ThreadLocal<ReviewDb> localDb = new ThreadLocal<ReviewDb>();

      bind(ReviewDb.class).toProvider(new Provider<ReviewDb>() {
        @Override
        public ReviewDb get() {
          // lazily open one connection per calling thread
          ReviewDb db = localDb.get();
          if (db == null) {
            try {
              db = schema.open();
              dbs.add(db);
              localDb.set(db);
            } catch (OrmException e) {
              throw new ProvisionException("unable to open ReviewDb", e);
            }
          }
          return db;
        }
      });
      listener().toInstance(new LifecycleListener() {
        @Override
        public void start() {
          // Do nothing.
        }

        @Override
        public void stop() {
          // close every connection that was opened by any thread
          for (ReviewDb db : dbs) {
            db.close();
          }
        }
      });
    }
  }

  /** Deletes all files of every known index directory (skipped on --dry-run). */
  private void deleteAll() throws IOException {
    if (dryRun) {
      return;
    }
    for (String index : SCHEMA_VERSIONS.keySet()) {
      File file = new File(sitePaths.index_dir, index);
      if (file.exists()) {
        Directory dir = FSDirectory.open(file);
        try {
          for (String name : dir.listAll()) {
            dir.deleteFile(name);
          }
        } finally {
          dir.close();
        }
      }
    }
  }

  /**
   * Indexes all changes of all projects: first scans the database to count
   * changes and collect project names, then submits one ReindexProject task
   * per project and reports progress until every task has finished.
   *
   * @return 0 if all projects indexed cleanly, 1 otherwise
   */
  private int indexAll() throws Exception {
    ReviewDb db = sysInjector.getInstance(ReviewDb.class);
    ListeningScheduledExecutorService executor = sysInjector.getInstance(
        Key.get(ListeningScheduledExecutorService.class, IndexExecutor.class));

    ProgressMonitor pm = new TextProgressMonitor();
    pm.start(1);
    pm.beginTask("Collecting projects", ProgressMonitor.UNKNOWN);
    Set<Project.NameKey> projects = Sets.newTreeSet();
    int changeCount = 0;
    try {
      for (Change change : db.changes().all()) {
        changeCount++;
        if (projects.add(change.getProject())) {
          pm.update(1);
        }
      }
    } finally {
      db.close();
    }
    pm.endTask();

    final MultiProgressMonitor mpm =
        new MultiProgressMonitor(System.err, "Reindexing changes");
    final Task projTask = mpm.beginSubTask("projects", projects.size());
    final Task doneTask = mpm.beginSubTask(null, changeCount);
    final Task failedTask = mpm.beginSubTask("failed", MultiProgressMonitor.UNKNOWN);

    Stopwatch sw = new Stopwatch().start();
    final List<ListenableFuture<?>> futures =
        Lists.newArrayListWithCapacity(projects.size());
    final AtomicBoolean ok = new AtomicBoolean(true);

    for (final Project.NameKey project : projects) {
      final ListenableFuture<?> future = executor.submit(
          new ReindexProject(project, doneTask, failedTask));
      futures.add(future);
      // record the outcome of each project task as soon as it completes
      future.addListener(new Runnable() {
        @Override
        public void run() {
          try {
            future.get();
          } catch (InterruptedException e) {
            fail(project, e);
          } catch (ExecutionException e) {
            ok.set(false); // Logged by indexer.
          } catch (RuntimeException e) {
            failAndThrow(project, e);
          } catch (Error e) {
            failAndThrow(project, e);
          } finally {
            projTask.update(1);
          }
        }

        private void fail(Project.NameKey project, Throwable t) {
          log.error("Failed to index project " + project, t);
          ok.set(false);
        }

        private void failAndThrow(Project.NameKey project, RuntimeException e) {
          fail(project, e);
          throw e;
        }

        private void failAndThrow(Project.NameKey project, Error e) {
          fail(project, e);
          throw e;
        }
      }, MoreExecutors.sameThreadExecutor());
    }

    // block until all project tasks have completed, then end the monitor
    mpm.waitFor(Futures.transform(Futures.successfulAsList(futures),
        new AsyncFunction<List<?>, Void>() {
          @Override
          public ListenableFuture<Void> apply(List<?> input) throws Exception {
            mpm.end();
            return Futures.immediateFuture(null);
          }
        }));

    double elapsed = sw.elapsed(TimeUnit.MILLISECONDS) / 1000d;
    int n = doneTask.getCount() + failedTask.getCount();
    System.out.format("Reindexed %d changes in %.01fs (%.01f/s)\n",
        n, elapsed, n/elapsed);
    return ok.get() ? 0 : 1;
  }

  /**
   * Indexes all changes of a single project. The current patch set of each
   * change is resolved to its commit; a RevWalk computes the changed file
   * paths for each commit, which are then fed to the ChangeIndexer.
   */
  private class ReindexProject implements Callable<Void> {
    private final ChangeIndexer indexer;
    private final Project.NameKey project;
    // maps current-patch-set commit ids to the change(s) pointing at them
    private final ListMultimap<ObjectId, ChangeData> byId;
    private final Task done;
    // NOTE(review): 'failed' is stored but never updated in this class --
    // confirm whether fail() was meant to call failed.update(1)
    private final Task failed;
    private Repository repo;
    private RevWalk walk;

    private ReindexProject(Project.NameKey project, Task done, Task failed) {
      this.indexer = sysInjector.getInstance(ChangeIndexer.class);
      this.project = project;
      this.byId = ArrayListMultimap.create();
      this.done = done;
      this.failed = failed;
    }

    @Override
    public Void call() throws Exception {
      ReviewDb db = sysInjector.getInstance(ReviewDb.class);
      GitRepositoryManager mgr = sysInjector.getInstance(GitRepositoryManager.class);
      repo = mgr.openRepository(project);
      try {
        // resolve every change's current patch set ref to its commit id
        Map<String, Ref> refs = repo.getAllRefs();
        for (Change c : db.changes().byProject(project)) {
          Ref r = refs.get(c.currentPatchSetId().toRefName());
          if (r != null) {
            byId.put(r.getObjectId(), new ChangeData(c));
          }
        }
        walk();
      } finally {
        repo.close();
        RepositoryCache.close(repo); // Only used once per Reindex call.
      }
      return null;
    }

    /**
     * Walks commits reachable from the branch heads first, indexing changes
     * as their commits are encountered; any commits not reached that way are
     * parsed and indexed individually afterwards.
     */
    private void walk() throws Exception {
      walk = new RevWalk(repo);
      try {
        // Walk only refs first to cover as many changes as we can without having
        // to mark every single change.
        for (Ref ref : repo.getRefDatabase().getRefs(Constants.R_HEADS).values()) {
          RevObject o = walk.parseAny(ref.getObjectId());
          if (o instanceof RevCommit) {
            walk.markStart((RevCommit) o);
          }
        }

        RevCommit bCommit;
        while ((bCommit = walk.next()) != null && !byId.isEmpty()) {
          if (byId.containsKey(bCommit)) {
            getPathsAndIndex(bCommit);
            byId.removeAll(bCommit);
          }
        }

        // remaining commits were not reachable from any head
        for (ObjectId id : byId.keySet()) {
          getPathsAndIndex(walk.parseCommit(id));
        }
      } finally {
        walk.release();
      }
    }

    /** Diffs the commit against its base and indexes every change mapped to it. */
    private void getPathsAndIndex(RevCommit bCommit) throws Exception {
      RevTree bTree = bCommit.getTree();
      try {
        RevTree aTree = aFor(bCommit, walk);
        if (aTree == null) {
          return; // no base tree (aFor returned null for >2 parents): skip
        }
        DiffFormatter df = new DiffFormatter(DisabledOutputStream.INSTANCE);
        try {
          df.setRepository(repo);
          List<ChangeData> cds = byId.get(bCommit);
          if (!cds.isEmpty()) {
            List<String> paths = getPaths(df.scan(aTree, bTree));
            for (ChangeData cd : cds) {
              cd.setCurrentFilePaths(paths);
              indexer.indexTask(cd).call();
              done.update(1);
              if (verbose) {
                System.out.println("Reindexed change " + cd.getId());
              }
            }
          }
        } finally {
          df.release();
        }
      } catch (Exception e) {
        fail("Failed to index commit " + bCommit.name(), e);
      }
    }

    /** Collects the sorted, de-duplicated old and new paths of the diff entries. */
    private List<String> getPaths(List<DiffEntry> filenames) {
      Set<String> paths = Sets.newTreeSet();
      for (DiffEntry e : filenames) {
        if (e.getOldPath() != null) {
          paths.add(e.getOldPath());
        }
        if (e.getNewPath() != null) {
          paths.add(e.getNewPath());
        }
      }
      return ImmutableList.copyOf(paths);
    }

    /**
     * Returns the tree to diff against: the empty tree for root commits, the
     * sole parent's tree for normal commits, the auto-merge for two-parent
     * merges, or null for commits with more than two parents.
     */
    private RevTree aFor(RevCommit b, RevWalk walk) throws IOException {
      switch (b.getParentCount()) {
        case 0:
          return walk.parseTree(emptyTree());
        case 1:
          RevCommit a = b.getParent(0);
          walk.parseBody(a);
          return walk.parseTree(a.getTree());
        case 2:
          return PatchListLoader.automerge(repo, walk, b);
        default:
          return null;
      }
    }

    /** Inserts the empty tree object into the repository and returns its id. */
    private ObjectId emptyTree() throws IOException {
      ObjectInserter oi = repo.newObjectInserter();
      try {
        ObjectId id = oi.insert(Constants.OBJ_TREE, new byte[] {});
        oi.flush();
        return id;
      } finally {
        oi.release();
      }
    }

    /** Logs an indexing failure without aborting the whole project task. */
    private void fail(String error, Exception e) {
      log.warn(error, e);
      if (verbose) {
        System.out.println(error);
      }
    }
  }

  /** Records the index schema versions and Lucene version (skipped on --dry-run). */
  private void writeVersion() throws IOException,
      ConfigInvalidException {
    if (dryRun) {
      return;
    }
    FileBasedConfig cfg =
        new FileBasedConfig(gerritIndexConfig(sitePaths), FS.detect());
    cfg.load();

    for (Map.Entry<String, Integer> e : SCHEMA_VERSIONS.entrySet()) {
      cfg.setInt("index", e.getKey(), "schemaVersion", e.getValue());
    }
    cfg.setEnum("lucene", null, "version", LUCENE_VERSION);
    cfg.save();
  }
}
|
gerrit-pgm/src/main/java/com/google/gerrit/pgm/Reindex.java
|
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm;
import static com.google.gerrit.lucene.IndexVersionCheck.SCHEMA_VERSIONS;
import static com.google.gerrit.lucene.IndexVersionCheck.gerritIndexConfig;
import static com.google.gerrit.lucene.LuceneChangeIndex.LUCENE_VERSION;
import static com.google.gerrit.server.schema.DataSourceProvider.Context.MULTI_USER;
import com.google.common.base.Stopwatch;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AsyncFunction;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningScheduledExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.gerrit.extensions.events.LifecycleListener;
import com.google.gerrit.extensions.registration.DynamicSet;
import com.google.gerrit.lifecycle.LifecycleManager;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.lucene.LuceneIndexModule;
import com.google.gerrit.pgm.util.SiteProgram;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.server.ReviewDb;
import com.google.gerrit.server.cache.CacheRemovalListener;
import com.google.gerrit.server.cache.h2.DefaultCacheFactory;
import com.google.gerrit.server.config.SitePaths;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.git.MultiProgressMonitor;
import com.google.gerrit.server.git.MultiProgressMonitor.Task;
import com.google.gerrit.server.index.ChangeIndexer;
import com.google.gerrit.server.index.IndexExecutor;
import com.google.gerrit.server.index.IndexModule;
import com.google.gerrit.server.patch.PatchListCacheImpl;
import com.google.gerrit.server.patch.PatchListLoader;
import com.google.gerrit.server.query.change.ChangeData;
import com.google.gwtorm.server.OrmException;
import com.google.gwtorm.server.SchemaFactory;
import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.TypeLiteral;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.diff.DiffFormatter;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter;
import org.eclipse.jgit.lib.ProgressMonitor;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.lib.RepositoryCache;
import org.eclipse.jgit.lib.TextProgressMonitor;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTree;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.file.FileBasedConfig;
import org.eclipse.jgit.util.FS;
import org.eclipse.jgit.util.io.DisabledOutputStream;
import org.kohsuke.args4j.Option;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Site program that rebuilds the secondary (Lucene) change index from scratch:
 * it wipes any existing index files, then walks every project's changes and
 * re-indexes each one on a pool of worker threads, finally recording the
 * schema versions that were written.
 */
public class Reindex extends SiteProgram {
  private static final Logger log = LoggerFactory.getLogger(Reindex.class);

  // Number of indexing worker threads; defaults to the host CPU count.
  @Option(name = "--threads", usage = "Number of threads to use for indexing")
  private int threads = Runtime.getRuntime().availableProcessors();

  // When set, skip deleting the old index and skip all index/config writes.
  @Option(name = "--dry-run", usage = "Dry run: don't write anything to index")
  private boolean dryRun;

  private Injector dbInjector;   // database-layer bindings (schema, site paths)
  private Injector sysInjector;  // child injector adding index/cache modules
  private SitePaths sitePaths;

  /**
   * Entry point: validates the site, wires up the injectors, clears and
   * rebuilds the index, and tears the lifecycles down in reverse order.
   *
   * @return 0 on success, 1 if any project failed to index.
   */
  @Override
  public int run() throws Exception {
    mustHaveValidSite();
    dbInjector = createDbInjector(MULTI_USER);
    if (!IndexModule.isEnabled(dbInjector)) {
      throw die("Secondary index not enabled");
    }
    LifecycleManager dbManager = new LifecycleManager();
    dbManager.add(dbInjector);
    dbManager.start();
    sitePaths = dbInjector.getInstance(SitePaths.class);
    // Delete before any index may be created depending on this data.
    deleteAll();
    sysInjector = createSysInjector();
    LifecycleManager sysManager = new LifecycleManager();
    sysManager.add(sysInjector);
    sysManager.start();
    int result = indexAll();
    writeVersion();
    sysManager.stop();
    dbManager.stop();
    return result;
  }

  /**
   * Builds the child injector used for indexing: patch-list cache, the Lucene
   * index module (honoring {@code threads} and {@code dryRun}), a per-thread
   * ReviewDb provider, and an in-memory cache factory.
   */
  private Injector createSysInjector() {
    List<Module> modules = Lists.newArrayList();
    modules.add(PatchListCacheImpl.module());
    modules.add(new LuceneIndexModule(false, threads, dryRun));
    modules.add(new ReviewDbModule());
    modules.add(new AbstractModule() {
      @SuppressWarnings("rawtypes")
      @Override
      protected void configure() {
        // Plugins are not loaded and we're just running through each change
        // once, so don't worry about cache removal.
        bind(new TypeLiteral<DynamicSet<CacheRemovalListener>>() {})
            .toInstance(DynamicSet.<CacheRemovalListener> emptySet());
        install(new DefaultCacheFactory.Module());
      }
    });
    return dbInjector.createChildInjector(modules);
  }

  /**
   * Binds ReviewDb to a thread-local provider: each worker thread lazily opens
   * its own connection, every opened handle is tracked in {@code dbs}, and all
   * of them are closed when the lifecycle stops.
   */
  private class ReviewDbModule extends LifecycleModule {
    @Override
    protected void configure() {
      final SchemaFactory<ReviewDb> schema = dbInjector.getInstance(
          Key.get(new TypeLiteral<SchemaFactory<ReviewDb>>() {}));
      // threads + 1: one connection per worker plus the main thread.
      final List<ReviewDb> dbs = Collections.synchronizedList(
          Lists.<ReviewDb> newArrayListWithCapacity(threads + 1));
      final ThreadLocal<ReviewDb> localDb = new ThreadLocal<ReviewDb>();
      bind(ReviewDb.class).toProvider(new Provider<ReviewDb>() {
        @Override
        public ReviewDb get() {
          ReviewDb db = localDb.get();
          if (db == null) {
            try {
              db = schema.open();
              dbs.add(db);
              localDb.set(db);
            } catch (OrmException e) {
              throw new ProvisionException("unable to open ReviewDb", e);
            }
          }
          return db;
        }
      });
      listener().toInstance(new LifecycleListener() {
        @Override
        public void start() {
          // Do nothing.
        }

        @Override
        public void stop() {
          // Close every connection that any worker thread opened.
          for (ReviewDb db : dbs) {
            db.close();
          }
        }
      });
    }
  }

  /**
   * Deletes every file inside each known index directory so the rebuild
   * starts from an empty index. No-op when {@code --dry-run} is set.
   */
  private void deleteAll() throws IOException {
    if (dryRun) {
      return;
    }
    for (String index : SCHEMA_VERSIONS.keySet()) {
      File file = new File(sitePaths.index_dir, index);
      if (file.exists()) {
        Directory dir = FSDirectory.open(file);
        try {
          for (String name : dir.listAll()) {
            dir.deleteFile(name);
          }
        } finally {
          dir.close();
        }
      }
    }
  }

  /**
   * Indexes all changes in all projects.
   *
   * <p>First scans the change table once to count changes and collect the set
   * of distinct projects, then submits one {@link ReindexProject} task per
   * project to the index executor, tracking progress and failures via a
   * {@link MultiProgressMonitor}.
   *
   * @return 0 if every project indexed cleanly, 1 otherwise.
   */
  private int indexAll() throws Exception {
    ReviewDb db = sysInjector.getInstance(ReviewDb.class);
    ListeningScheduledExecutorService executor = sysInjector.getInstance(
        Key.get(ListeningScheduledExecutorService.class, IndexExecutor.class));
    ProgressMonitor pm = new TextProgressMonitor();
    pm.start(1);
    pm.beginTask("Collecting projects", ProgressMonitor.UNKNOWN);
    Set<Project.NameKey> projects = Sets.newTreeSet();
    int changeCount = 0;
    try {
      for (Change change : db.changes().all()) {
        changeCount++;
        if (projects.add(change.getProject())) {
          pm.update(1);
        }
      }
    } finally {
      db.close();
    }
    pm.endTask();
    final MultiProgressMonitor mpm =
        new MultiProgressMonitor(System.err, "Reindexing changes");
    final Task projTask = mpm.beginSubTask("projects", projects.size());
    final Task doneTask = mpm.beginSubTask(null, changeCount);
    final Task failedTask = mpm.beginSubTask("failed", MultiProgressMonitor.UNKNOWN);
    Stopwatch sw = new Stopwatch().start();
    final List<ListenableFuture<?>> futures =
        Lists.newArrayListWithCapacity(projects.size());
    final AtomicBoolean ok = new AtomicBoolean(true);
    for (final Project.NameKey project : projects) {
      final ListenableFuture<?> future = executor.submit(
          new ReindexProject(project, doneTask, failedTask));
      futures.add(future);
      future.addListener(new Runnable() {
        @Override
        public void run() {
          try {
            future.get();
          } catch (InterruptedException e) {
            fail(project, e);
          } catch (ExecutionException e) {
            ok.set(false); // Logged by indexer.
          } catch (RuntimeException e) {
            failAndThrow(project, e);
          } catch (Error e) {
            failAndThrow(project, e);
          } finally {
            projTask.update(1);
          }
        }

        // Record the failure and flip the overall result to failed.
        private void fail(Project.NameKey project, Throwable t) {
          log.error("Failed to index project " + project, t);
          ok.set(false);
        }

        private void failAndThrow(Project.NameKey project, RuntimeException e) {
          fail(project, e);
          throw e;
        }

        private void failAndThrow(Project.NameKey project, Error e) {
          fail(project, e);
          throw e;
        }
      }, MoreExecutors.sameThreadExecutor());
    }
    // Block until all project tasks settle, ending the monitor once the
    // combined future (successful or not) completes.
    mpm.waitFor(Futures.transform(Futures.successfulAsList(futures),
        new AsyncFunction<List<?>, Void>() {
          @Override
          public ListenableFuture<Void> apply(List<?> input) throws Exception {
            mpm.end();
            return Futures.immediateFuture(null);
          }
        }));
    double elapsed = sw.elapsed(TimeUnit.MILLISECONDS) / 1000d;
    int n = doneTask.getCount() + failedTask.getCount();
    System.out.format("Reindexed %d changes in %.01fs (%.01f/s)\n",
        n, elapsed, n/elapsed);
    return ok.get() ? 0 : 1;
  }

  /**
   * Per-project indexing task: maps each change's current patch-set ref to its
   * commit, walks the repository once to compute changed file paths, and feeds
   * the resulting ChangeData into the indexer.
   */
  private class ReindexProject implements Callable<Void> {
    private final ChangeIndexer indexer;
    private final Project.NameKey project;
    // Commit id of a change's current patch set -> the change(s) at that commit.
    private final ListMultimap<ObjectId, ChangeData> byId;
    private final Task done;
    private final Task failed;
    private Repository repo;
    private RevWalk walk;

    private ReindexProject(Project.NameKey project, Task done, Task failed) {
      this.indexer = sysInjector.getInstance(ChangeIndexer.class);
      this.project = project;
      this.byId = ArrayListMultimap.create();
      this.done = done;
      this.failed = failed;
    }

    @Override
    public Void call() throws Exception {
      ReviewDb db = sysInjector.getInstance(ReviewDb.class);
      GitRepositoryManager mgr = sysInjector.getInstance(GitRepositoryManager.class);
      repo = mgr.openRepository(project);
      try {
        Map<String, Ref> refs = repo.getAllRefs();
        for (Change c : db.changes().byProject(project)) {
          Ref r = refs.get(c.currentPatchSetId().toRefName());
          if (r != null) {
            byId.put(r.getObjectId(), new ChangeData(c));
          }
        }
        walk();
      } finally {
        repo.close();
        RepositoryCache.close(repo); // Only used once per Reindex call.
      }
      return null;
    }

    /**
     * Walks history from refs/heads, indexing each change whose commit is
     * reached; commits not reachable from any head are parsed individually
     * afterwards.
     */
    private void walk() throws Exception {
      walk = new RevWalk(repo);
      try {
        // Walk only refs first to cover as many changes as we can without having
        // to mark every single change.
        for (Ref ref : repo.getRefDatabase().getRefs(Constants.R_HEADS).values()) {
          RevObject o = walk.parseAny(ref.getObjectId());
          if (o instanceof RevCommit) {
            walk.markStart((RevCommit) o);
          }
        }
        RevCommit bCommit;
        // RevCommit is an ObjectId, so it can key the byId multimap directly.
        while ((bCommit = walk.next()) != null && !byId.isEmpty()) {
          if (byId.containsKey(bCommit)) {
            getPathsAndIndex(bCommit);
            byId.removeAll(bCommit);
          }
        }
        // Any remaining changes were not reachable from a head.
        for (ObjectId id : byId.keySet()) {
          getPathsAndIndex(walk.parseCommit(id));
        }
      } finally {
        walk.release();
      }
    }

    /**
     * Diffs the commit against its base tree to compute changed file paths,
     * then indexes each change at that commit. Failures are logged and counted
     * rather than aborting the whole project.
     */
    private void getPathsAndIndex(RevCommit bCommit) throws Exception {
      RevTree bTree = bCommit.getTree();
      try {
        RevTree aTree = aFor(bCommit, walk);
        if (aTree == null) {
          return;
        }
        DiffFormatter df = new DiffFormatter(DisabledOutputStream.INSTANCE);
        try {
          df.setRepository(repo);
          List<ChangeData> cds = byId.get(bCommit);
          if (!cds.isEmpty()) {
            List<String> paths = getPaths(df.scan(aTree, bTree));
            for (ChangeData cd : cds) {
              cd.setCurrentFilePaths(paths);
              indexer.indexTask(cd).call();
              done.update(1);
            }
          }
        } finally {
          df.release();
        }
      } catch (Exception e) {
        log.warn("Failed to index changes for commit " + bCommit.name(), e);
        failed.update(1);
      }
    }

    /** Collects the old and new path of every diff entry into a sorted list. */
    private List<String> getPaths(List<DiffEntry> filenames) {
      Set<String> paths = Sets.newTreeSet();
      for (DiffEntry e : filenames) {
        if (e.getOldPath() != null) {
          paths.add(e.getOldPath());
        }
        if (e.getNewPath() != null) {
          paths.add(e.getNewPath());
        }
      }
      return ImmutableList.copyOf(paths);
    }

    /**
     * Chooses the base tree to diff a commit against: the empty tree for root
     * commits, the sole parent's tree for ordinary commits, an automerge for
     * two-parent merges, and null (skip) for octopus merges.
     */
    private RevTree aFor(RevCommit b, RevWalk walk) throws IOException {
      switch (b.getParentCount()) {
        case 0:
          return walk.parseTree(emptyTree());
        case 1:
          RevCommit a = b.getParent(0);
          walk.parseBody(a);
          return walk.parseTree(a.getTree());
        case 2:
          return PatchListLoader.automerge(repo, walk, b);
        default:
          return null;
      }
    }

    /** Inserts (or resolves) the canonical empty tree object in the repo. */
    private ObjectId emptyTree() throws IOException {
      ObjectInserter oi = repo.newObjectInserter();
      try {
        ObjectId id = oi.insert(Constants.OBJ_TREE, new byte[] {});
        oi.flush();
        return id;
      } finally {
        oi.release();
      }
    }
  }

  /**
   * Records the schema version of each rebuilt index (and the Lucene version)
   * in the site's index config file. No-op when {@code --dry-run} is set.
   */
  private void writeVersion() throws IOException,
      ConfigInvalidException {
    if (dryRun) {
      return;
    }
    FileBasedConfig cfg =
        new FileBasedConfig(gerritIndexConfig(sitePaths), FS.detect());
    cfg.load();
    for (Map.Entry<String, Integer> e : SCHEMA_VERSIONS.entrySet()) {
      cfg.setInt("index", e.getKey(), "schemaVersion", e.getValue());
    }
    cfg.setEnum("lucene", null, "version", LUCENE_VERSION);
    cfg.save();
  }
}
|
Add a verbose flag to reindex
Change-Id: I6c23fb1e8a342daa1dbae674037fd2f5435e758c
|
gerrit-pgm/src/main/java/com/google/gerrit/pgm/Reindex.java
|
Add a verbose flag to reindex
|
|
Java
|
apache-2.0
|
ab714bbb2155dc06fcc257f1c27bf0fd00035871
| 0
|
kotcrab/VisEditor,piotr-j/VisEditor,piotr-j/VisEditor,kotcrab/vis-editor,kotcrab/vis-editor
|
/*
* Copyright 2014-2016 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kotcrab.vis.editor;
import com.artemis.annotations.SkipWire;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.backends.lwjgl.LwjglGraphics;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.utils.Align;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.ScreenViewport;
import com.kotcrab.vis.editor.event.ProjectStatusEvent;
import com.kotcrab.vis.editor.event.ProjectStatusEvent.Status;
import com.kotcrab.vis.editor.module.VisContainers;
import com.kotcrab.vis.editor.module.editor.*;
import com.kotcrab.vis.editor.module.project.Project;
import com.kotcrab.vis.editor.module.project.ProjectModuleContainer;
import com.kotcrab.vis.editor.plugin.api.ContainerExtension.ExtensionScope;
import com.kotcrab.vis.editor.ui.EditorFrame;
import com.kotcrab.vis.editor.ui.NoProjectFilesOpenView;
import com.kotcrab.vis.editor.ui.WindowListener;
import com.kotcrab.vis.editor.ui.dialog.AsyncTaskProgressDialog;
import com.kotcrab.vis.editor.ui.dialog.NewProjectDialog;
import com.kotcrab.vis.editor.ui.dialog.SettingsDialog;
import com.kotcrab.vis.editor.ui.dialog.UnsavedResourcesDialog;
import com.kotcrab.vis.editor.ui.tabbedpane.MainContentTab;
import com.kotcrab.vis.editor.ui.tabbedpane.TabViewMode;
import com.kotcrab.vis.editor.util.ThreadUtils;
import com.kotcrab.vis.editor.util.async.AsyncTask;
import com.kotcrab.vis.editor.util.scene2d.VisGroup;
import com.kotcrab.vis.editor.util.vis.LaunchConfiguration;
import com.kotcrab.vis.ui.VisUI;
import com.kotcrab.vis.ui.util.dialog.Dialogs;
import com.kotcrab.vis.ui.util.dialog.Dialogs.OptionDialog;
import com.kotcrab.vis.ui.util.dialog.Dialogs.OptionDialogType;
import com.kotcrab.vis.ui.util.dialog.OptionDialogAdapter;
import com.kotcrab.vis.ui.util.value.PrefHeightIfVisibleValue;
import com.kotcrab.vis.ui.widget.VisSplitPane;
import com.kotcrab.vis.ui.widget.VisTable;
import com.kotcrab.vis.ui.widget.file.FileChooser;
import com.kotcrab.vis.ui.widget.file.SingleFileChooserListener;
import com.kotcrab.vis.ui.widget.tabbedpane.Tab;
import com.kotcrab.vis.ui.widget.tabbedpane.TabbedPaneAdapter;
import com.kotcrab.vis.ui.widget.tabbedpane.TabbedPaneListener;
import java.lang.reflect.Field;
/**
* VisEditor main ApplicationAdapter class. The main() method is located in {@link EditorFrame}
* @author Kotcrab
*/
public class Editor extends ApplicationAdapter {
  // Global singleton, assigned in create(); used by dialogs to reach the editor.
  public static Editor instance;

  private EditorFrame frame;
  private LaunchConfiguration launchConfig;

  @SkipWire private Stage stage;
  private VisGroup stageRoot;
  private Table uiRoot;

  // Editor-scoped and project-scoped module containers; the fields below them
  // are injected from editorMC via injectModules(this).
  private EditorModuleContainer editorMC;
  private ProjectModuleContainer projectMC;
  private TabsModule tabsModule;
  private StatusBarModule statusBar;
  private ProjectIOModule projectIO;
  private FileChooserModule fileChooser;
  private ExtensionStorageModule extensionStorage;
  private GeneralSettingsModule settings;
  private ColorSettingsModule colorSettings;
  private ExperimentalSettingsModule experimentalSettings;

  // TODO move to module
  private Table mainContentTable;
  private Table tabContentTable;
  private VisTable quickAccessContentTable;
  private VisSplitPane splitPane;
  private NoProjectFilesOpenView noProjectFilesOpenView;
  private SettingsDialog settingsDialog;

  private boolean projectLoaded = false;
  private MainContentTab tab;            // currently active main-content tab
  private boolean exitInProgress;        // guards against re-entrant exit requests
  private Tab quickAccessTab;            // currently active quick-access tab
  private ScreenViewport stageViewport;

  public Editor (EditorFrame frame, LaunchConfiguration launchConfig) {
    this.frame = frame;
    this.launchConfig = launchConfig;
  }

  /**
   * libGDX lifecycle entry point: loads assets and VisUI, builds the stage and
   * root UI, initializes the module containers, and applies optional UI scaling.
   */
  @Override
  public void create () {
    instance = this;
    Log.debug("Starting loading");
    Assets.load();
    VisUI.load();
    VisUI.setDefaultTitleAlign(Align.center);
    FileChooser.setFavoritesPrefsName("com.kotcrab.vis.editor");
    Log.debug("VisUI " + VisUI.VERSION + " loaded");
    if (Gdx.graphics instanceof LwjglGraphics && ((LwjglGraphics) Gdx.graphics).isSoftwareMode()) {
      Log.info("Running in software mode");
    }
    stage = createStage();
    Gdx.input.setInputProcessor(stage);
    uiRoot = new Table();
    uiRoot.setFillParent(true);
    stage.addActor(uiRoot);
    createUI();
    createModuleContainers();
    createModulesUI();
    Log.debug("Loading completed");
    if (experimentalSettings.isUIScale() || launchConfig.scaleUIEnabled) {
      // 0.5 units per pixel doubles the apparent size of all UI elements.
      stageViewport.setUnitsPerPixel(0.5f);
    }
  }

  /**
   * Creates the scene2d Stage and swaps its root group for a {@link VisGroup}
   * via reflection (the field is final and plain groups do not fire the
   * actor-changed events this editor relies on).
   */
  private Stage createStage () {
    stageViewport = new ScreenViewport();
    Stage stage = new Stage(stageViewport);
    //the stage root is final field, by default group does not support actor changed events and we need that
    //here we just set our custom group to get those events
    try {
      stageRoot = new VisGroup(stage);
      Field field = stage.getClass().getDeclaredField("root");
      field.setAccessible(true);
      field.set(stage, stageRoot);
    } catch (ReflectiveOperationException e) {
      Log.exception(e);
    }
    return stage;
  }

  /** Instantiates the reusable layout widgets and the settings dialog. */
  private void createUI () {
    mainContentTable = new Table();
    tabContentTable = new Table();
    quickAccessContentTable = new VisTable();
    splitPane = new VisSplitPane(null, null, true);
    splitPane.setSplitAmount(0.77f);
    settingsDialog = new SettingsDialog();
  }

  /**
   * Builds the editor module container, injects this class's module fields,
   * loads plugin-provided editor modules, and registers them all with the
   * settings dialog.
   */
  private void createModuleContainers () {
    editorMC = new EditorModuleContainer();
    projectMC = new ProjectModuleContainer(editorMC);
    noProjectFilesOpenView = new NoProjectFilesOpenView(projectMC);
    VisContainers.createEditorModules(editorMC, createTabsModuleListener(), createQuickAccessModuleListener());
    editorMC.init();
    editorMC.injectModules(this);
    Array<EditorModule> modules = extensionStorage.getContainersExtensions(EditorModule.class, ExtensionScope.EDITOR);
    editorMC.addAll(modules);
    settingsDialog.addAll(editorMC.getModules());
  }

  public EditorModuleContainer getEditorModuleContainer () {
    return editorMC;
  }

  /** Routes main tab-pane events to {@link #mainContentTabChanged}. */
  private TabbedPaneListener createTabsModuleListener () {
    return new TabbedPaneAdapter() {
      @Override
      public void switchedTab (Tab tab) {
        mainContentTabChanged((MainContentTab) tab);
      }

      @Override
      public void removedAllTabs () {
        mainContentTabChanged(null);
      }
    };
  }

  /** Routes quick-access tab-pane events to {@link #quickAccessViewChanged}. */
  private TabbedPaneListener createQuickAccessModuleListener () {
    return new TabbedPaneAdapter() {
      @Override
      public void switchedTab (Tab tab) {
        quickAccessViewChanged(tab);
      }

      @Override
      public void removedAllTabs () {
        quickAccessViewChanged(null);
      }
    };
  }

  /** Stacks the module-provided bars and content tables into the root layout. */
  private void createModulesUI () {
    uiRoot.add(editorMC.get(MenuBarModule.class).getTable()).fillX().expandX().row();
    uiRoot.add(editorMC.get(ToolbarModule.class).getTable()).fillX().expandX().row();
    uiRoot.add(editorMC.get(TabsModule.class).getTable()).height(PrefHeightIfVisibleValue.INSTANCE).fillX().expandX().row();
    uiRoot.add(mainContentTable).expand().fill().row();
    uiRoot.add(editorMC.get(QuickAccessModule.class).getTable()).fillX().expandX().row();
    uiRoot.add(editorMC.get(StatusBarModule.class).getTable()).fillX().expandX().row();
  }

  @Override
  public void resize (int width, int height) {
    stage.getViewport().update(width, height, true);
    editorMC.resize();
    projectMC.resize();
  }

  /** Clears to the configured background color, then renders the active tab and stage. */
  @Override
  public void render () {
    Color bgColor = colorSettings.getBackgroundColor();
    Gdx.gl.glClearColor(bgColor.r, bgColor.g, bgColor.b, 1f);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
    stage.act(Gdx.graphics.getDeltaTime());
    if (tab != null) tab.render(stage.getBatch());
    stage.draw();
  }

  /**
   * Disposes all resources and arms a daemon watchdog thread that force-halts
   * the JVM after 5 seconds in case shutdown hooks deadlock.
   */
  @Override
  public void dispose () {
    frame.dispose();
    editorMC.dispose();
    if (projectLoaded) projectMC.dispose();
    stage.dispose();
    Assets.dispose();
    VisUI.dispose();
    Log.dispose();
    //make sure that application will exit eventually
    Thread exitThread = new Thread(() -> {
      ThreadUtils.sleep(5000);
      //System.exit(-2);
      //sometimes awt shutdown hook may deadlock on System.exit so I'm using runtime halt
      Runtime.getRuntime().halt(-2);
    }, "Force Exit");
    exitThread.setDaemon(true);
    exitThread.start();
  }

  /** Asks the user whether to restart the editor; "yes" triggers a restarting exit. */
  public void showRestartDialog () {
    OptionDialog optionDialog = Dialogs.showOptionDialog(stage, "Restart?",
        "Editor restart is required to apply changes", OptionDialogType.YES_NO, new OptionDialogAdapter() {
          @Override
          public void yes () {
            Editor.instance.requestExit(true);
          }
        });
    optionDialog.setNoButtonText("Later");
    optionDialog.setYesButtonText("Restart");
  }

  public void requestExit () {
    requestExit(false);
  }

  /** @see #showRestartDialog() */
  private void requestExit (boolean restartAfterExit) {
    // Ignore repeated exit requests while one is already being handled.
    if (exitInProgress) return;
    exitInProgress = true;
    if (projectLoaded == false) {
      showExitDialogIfNeeded(restartAfterExit);
      return;
    }
    // With unsaved tabs, let the user save or discard before exiting.
    if (tabsModule.getDirtyTabCount() > 0) {
      stage.addActor(new UnsavedResourcesDialog(tabsModule, new WindowListener() {
        @Override
        public void finished () {
          showExitDialogIfNeeded(restartAfterExit);
        }

        @Override
        public void canceled () {
          exitInProgress = false;
        }
      }).fadeIn());
    } else
      showExitDialogIfNeeded(restartAfterExit);
  }

  /** Shows the exit-confirmation dialog when enabled in settings, else exits directly. */
  private void showExitDialogIfNeeded (boolean restartAfterExit) {
    //the "Do you want to restart" dialog was already displayed and user accepted so no need to display exit dialog even if it is enabled
    if (restartAfterExit) {
      exit(true);
      return;
    }
    if (settings.isConfirmExit()) {
      OptionDialog dialog = Dialogs.showOptionDialog(stage, "Confirm Exit", "Are you sure you want to exit VisEditor?", OptionDialogType.YES_CANCEL, new OptionDialogAdapter() {
        @Override
        public void yes () {
          exit(false);
        }

        @Override
        public void cancel () {
          exitInProgress = false;
        }
      });
      dialog.setYesButtonText("Exit");
    } else
      exit(false);
  }

  /** Terminates the app, optionally spawning a fresh editor instance first. */
  private void exit (boolean restartAfterExit) {
    if (restartAfterExit) App.startNewInstance();
    Gdx.app.exit();
  }

  public LaunchConfiguration getLaunchConfig () {
    return launchConfig;
  }

  public Stage getStage () {
    return stage;
  }

  /** Unloads the current project, first prompting about unsaved tabs if any. */
  public void requestProjectUnload () {
    if (tabsModule.getDirtyTabCount() > 0)
      stage.addActor(new UnsavedResourcesDialog(tabsModule, () -> doProjectUnloading()).fadeIn());
    else
      doProjectUnloading();
  }

  private void doProjectUnloading () {
    projectLoaded = false;
    settingsDialog.removeAll(projectMC.getModules());
    projectMC.dispose();
    statusBar.setText("Project unloaded");
    App.eventBus.post(new ProjectStatusEvent(Status.Unloaded, projectMC.getProject()));
  }

  /** Opens a file chooser and delegates loading of the picked project to ProjectIOModule. */
  public void loadProjectDialog () {
    fileChooser.pickFileOrDirectory(new SingleFileChooserListener() {
      @Override
      public void selected (FileHandle file) {
        editorMC.get(ProjectIOModule.class).loadHandleError(stage, file);
      }
    });
  }

  public void newProjectDialog () {
    stage.addActor(new NewProjectDialog(fileChooser, projectIO).fadeIn());
  }

  /**
   * Loads the given project, showing a progress dialog. If another project is
   * already loaded, asks to unload it first. The loading task alternates
   * between background work and OpenGL-thread work, busy-waiting (10 ms sleeps)
   * until the GL-side initialization flips {@code controller.loading}.
   */
  public void projectLoaded (final Project project) {
    if (projectLoaded) {
      Dialogs.showOptionDialog(stage, "Warning", "Other project is already loaded, unload it and continue?", OptionDialogType.YES_CANCEL, new OptionDialogAdapter() {
        @Override
        public void yes () {
          switchProject(project);
        }
      });
      return;
    }
    ProjectLoadingDialogController controller = new ProjectLoadingDialogController();
    AsyncTaskProgressDialog dialog = new AsyncTaskProgressDialog("Loading Project", new AsyncTask("ProjectLoaderThread") {
      @Override
      public void execute () {
        setProgressPercent(10);
        setMessage("Loading project data...");
        executeOnOpenGL(() -> {
          projectLoaded = true;
          projectMC.setProject(project);
          VisContainers.createProjectModules(projectMC, extensionStorage);
        });
        setMessage("Initializing...");
        setProgressPercent(50);
        ThreadUtils.sleep(10);
        executeOnOpenGL(() -> {
          projectMC.init();
          settingsDialog.addAll(projectMC.getModules());
          statusBar.setText("Project loaded");
          App.eventBus.post(new ProjectStatusEvent(Status.Loaded, project));
          controller.loading = false;
        });
        while (controller.loading) {
          ThreadUtils.sleep(10);
        }
      }
    });
    dialog.setVisible(true);
    stage.addActor(dialog);
  }

  /** Unloads the current project, then loads the new one on the next render frame. */
  private void switchProject (final Project project) {
    requestProjectUnload();
    Gdx.app.postRunnable(() -> projectLoaded(project));
  }

  public void showSettingsWindow () {
    stage.addActor(settingsDialog.fadeIn());
  }

  /**
   * Swaps the main content area to the newly selected tab and updates the
   * window title; shows the "no files open" view when a project is loaded but
   * no tab is active.
   */
  private void mainContentTabChanged (MainContentTab tab) {
    this.tab = tab;
    if (tab == null)
      frame.setTitle("VisEditor");
    else
      frame.setTitle("VisEditor - " + tab.getTabTitle());
    tabContentTable.clear();
    if (tab != null)
      tabContentTable.add(tab.getContentTable()).expand().fill();
    else if (projectLoaded)
      tabContentTable.add(noProjectFilesOpenView).center();
    updateRootView();
  }

  /** Swaps the quick-access area content when its tab selection changes. */
  private void quickAccessViewChanged (Tab tab) {
    quickAccessTab = tab;
    quickAccessContentTable.clear();
    if (tab != null)
      quickAccessContentTable.add(tab.getContentTable()).expand().fill();
    updateRootView();
  }

  /**
   * Rebuilds the main content layout: tab content alone when the tab is
   * tab-only or no quick-access view exists, otherwise a vertical split pane
   * with the quick-access view below.
   */
  private void updateRootView () {
    mainContentTable.clear();
    splitPane.setWidgets(null, null);
    if (tab != null && tab.getViewMode() == TabViewMode.TAB_ONLY || quickAccessTab == null)
      mainContentTable.add(tabContentTable).expand().fill();
    else {
      splitPane.setWidgets(tabContentTable, quickAccessContentTable);
      mainContentTable.add(splitPane).expand().fill();
    }
  }

  // Shared flag between the loader thread and the OpenGL thread during project load.
  private class ProjectLoadingDialogController {
    public boolean loading = true;
  }
}
|
Editor/src/com/kotcrab/vis/editor/Editor.java
|
/*
* Copyright 2014-2016 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kotcrab.vis.editor;
import com.artemis.annotations.SkipWire;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.backends.lwjgl.LwjglGraphics;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.GL20;
import com.badlogic.gdx.scenes.scene2d.Stage;
import com.badlogic.gdx.scenes.scene2d.ui.Table;
import com.badlogic.gdx.utils.Align;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.viewport.ScreenViewport;
import com.kotcrab.vis.editor.event.ProjectStatusEvent;
import com.kotcrab.vis.editor.event.ProjectStatusEvent.Status;
import com.kotcrab.vis.editor.module.VisContainers;
import com.kotcrab.vis.editor.module.editor.*;
import com.kotcrab.vis.editor.module.project.Project;
import com.kotcrab.vis.editor.module.project.ProjectModuleContainer;
import com.kotcrab.vis.editor.plugin.api.ContainerExtension.ExtensionScope;
import com.kotcrab.vis.editor.ui.EditorFrame;
import com.kotcrab.vis.editor.ui.NoProjectFilesOpenView;
import com.kotcrab.vis.editor.ui.WindowListener;
import com.kotcrab.vis.editor.ui.dialog.AsyncTaskProgressDialog;
import com.kotcrab.vis.editor.ui.dialog.NewProjectDialog;
import com.kotcrab.vis.editor.ui.dialog.SettingsDialog;
import com.kotcrab.vis.editor.ui.dialog.UnsavedResourcesDialog;
import com.kotcrab.vis.editor.ui.tabbedpane.MainContentTab;
import com.kotcrab.vis.editor.ui.tabbedpane.TabViewMode;
import com.kotcrab.vis.editor.util.ThreadUtils;
import com.kotcrab.vis.editor.util.async.AsyncTask;
import com.kotcrab.vis.editor.util.scene2d.VisGroup;
import com.kotcrab.vis.editor.util.vis.LaunchConfiguration;
import com.kotcrab.vis.ui.VisUI;
import com.kotcrab.vis.ui.util.dialog.Dialogs;
import com.kotcrab.vis.ui.util.dialog.Dialogs.OptionDialog;
import com.kotcrab.vis.ui.util.dialog.Dialogs.OptionDialogType;
import com.kotcrab.vis.ui.util.dialog.OptionDialogAdapter;
import com.kotcrab.vis.ui.widget.VisSplitPane;
import com.kotcrab.vis.ui.widget.VisTable;
import com.kotcrab.vis.ui.widget.file.FileChooser;
import com.kotcrab.vis.ui.widget.file.SingleFileChooserListener;
import com.kotcrab.vis.ui.widget.tabbedpane.Tab;
import com.kotcrab.vis.ui.widget.tabbedpane.TabbedPaneAdapter;
import com.kotcrab.vis.ui.widget.tabbedpane.TabbedPaneListener;
import java.lang.reflect.Field;
/**
* VisEditor main ApplicationAdapter class. The main() method is located in {@link EditorFrame}
* @author Kotcrab
*/
public class Editor extends ApplicationAdapter {
public static Editor instance;
private EditorFrame frame;
private LaunchConfiguration launchConfig;
@SkipWire private Stage stage;
private VisGroup stageRoot;
private Table uiRoot;
private EditorModuleContainer editorMC;
private ProjectModuleContainer projectMC;
private TabsModule tabsModule;
private StatusBarModule statusBar;
private ProjectIOModule projectIO;
private FileChooserModule fileChooser;
private ExtensionStorageModule extensionStorage;
private GeneralSettingsModule settings;
private ColorSettingsModule colorSettings;
private ExperimentalSettingsModule experimentalSettings;
// TODO move to module
private Table mainContentTable;
private Table tabContentTable;
private VisTable quickAccessContentTable;
private VisSplitPane splitPane;
private NoProjectFilesOpenView noProjectFilesOpenView;
private SettingsDialog settingsDialog;
private boolean projectLoaded = false;
private MainContentTab tab;
private boolean exitInProgress;
private Tab quickAccessTab;
private ScreenViewport stageViewport;
public Editor (EditorFrame frame, LaunchConfiguration launchConfig) {
this.frame = frame;
this.launchConfig = launchConfig;
}
@Override
public void create () {
instance = this;
Log.debug("Starting loading");
Assets.load();
VisUI.load();
VisUI.setDefaultTitleAlign(Align.center);
FileChooser.setFavoritesPrefsName("com.kotcrab.vis.editor");
Log.debug("VisUI " + VisUI.VERSION + " loaded");
if (Gdx.graphics instanceof LwjglGraphics && ((LwjglGraphics) Gdx.graphics).isSoftwareMode()) {
Log.info("Running in software mode");
}
stage = createStage();
Gdx.input.setInputProcessor(stage);
uiRoot = new Table();
uiRoot.setFillParent(true);
stage.addActor(uiRoot);
createUI();
createModuleContainers();
createModulesUI();
Log.debug("Loading completed");
if (experimentalSettings.isUIScale() || launchConfig.scaleUIEnabled) {
stageViewport.setUnitsPerPixel(0.5f);
}
}
private Stage createStage () {
stageViewport = new ScreenViewport();
Stage stage = new Stage(stageViewport);
//the stage root is final field, by default group does not support actor changed events and we need that
//here we just set our custom group to get those events
try {
stageRoot = new VisGroup(stage);
Field field = stage.getClass().getDeclaredField("root");
field.setAccessible(true);
field.set(stage, stageRoot);
} catch (ReflectiveOperationException e) {
Log.exception(e);
}
return stage;
}
private void createUI () {
mainContentTable = new Table();
tabContentTable = new Table();
quickAccessContentTable = new VisTable();
splitPane = new VisSplitPane(null, null, true);
splitPane.setSplitAmount(0.77f);
settingsDialog = new SettingsDialog();
}
private void createModuleContainers () {
editorMC = new EditorModuleContainer();
projectMC = new ProjectModuleContainer(editorMC);
noProjectFilesOpenView = new NoProjectFilesOpenView(projectMC);
VisContainers.createEditorModules(editorMC, createTabsModuleListener(), createQuickAccessModuleListener());
editorMC.init();
editorMC.injectModules(this);
Array<EditorModule> modules = extensionStorage.getContainersExtensions(EditorModule.class, ExtensionScope.EDITOR);
editorMC.addAll(modules);
settingsDialog.addAll(editorMC.getModules());
}
public EditorModuleContainer getEditorModuleContainer () {
return editorMC;
}
private TabbedPaneListener createTabsModuleListener () {
return new TabbedPaneAdapter() {
@Override
public void switchedTab (Tab tab) {
mainContentTabChanged((MainContentTab) tab);
}
@Override
public void removedAllTabs () {
mainContentTabChanged(null);
}
};
}
private TabbedPaneListener createQuickAccessModuleListener () {
return new TabbedPaneAdapter() {
@Override
public void switchedTab (Tab tab) {
quickAccessViewChanged(tab);
}
@Override
public void removedAllTabs () {
quickAccessViewChanged(null);
}
};
}
private void createModulesUI () {
uiRoot.add(editorMC.get(MenuBarModule.class).getTable()).fillX().expandX().row();
uiRoot.add(editorMC.get(ToolbarModule.class).getTable()).fillX().expandX().row();
uiRoot.add(editorMC.get(TabsModule.class).getTable()).fillX().expandX().row();
uiRoot.add(mainContentTable).expand().fill().row();
uiRoot.add(editorMC.get(QuickAccessModule.class).getTable()).fillX().expandX().row();
uiRoot.add(editorMC.get(StatusBarModule.class).getTable()).fillX().expandX().row();
}
@Override
public void resize (int width, int height) {
stage.getViewport().update(width, height, true);
editorMC.resize();
projectMC.resize();
}
@Override
public void render () {
Color bgColor = colorSettings.getBackgroundColor();
Gdx.gl.glClearColor(bgColor.r, bgColor.g, bgColor.b, 1f);
Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);
stage.act(Gdx.graphics.getDeltaTime());
if (tab != null) tab.render(stage.getBatch());
stage.draw();
}
@Override
public void dispose () {
frame.dispose();
editorMC.dispose();
if (projectLoaded) projectMC.dispose();
stage.dispose();
Assets.dispose();
VisUI.dispose();
Log.dispose();
//make sure that application will exit eventually
Thread exitThread = new Thread(() -> {
ThreadUtils.sleep(5000);
//System.exit(-2);
//sometimes awt shutdown hook may deadlock on System.exit so I'm using runtime halt
Runtime.getRuntime().halt(-2);
}, "Force Exit");
exitThread.setDaemon(true);
exitThread.start();
}
public void showRestartDialog () {
OptionDialog optionDialog = Dialogs.showOptionDialog(stage, "Restart?",
"Editor restart is required to apply changes", OptionDialogType.YES_NO, new OptionDialogAdapter() {
@Override
public void yes () {
Editor.instance.requestExit(true);
}
});
optionDialog.setNoButtonText("Later");
optionDialog.setYesButtonText("Restart");
}
public void requestExit () {
requestExit(false);
}
/** @see #showRestartDialog() */
private void requestExit (boolean restartAfterExit) {
if (exitInProgress) return;
exitInProgress = true;
if (projectLoaded == false) {
showExitDialogIfNeeded(restartAfterExit);
return;
}
if (tabsModule.getDirtyTabCount() > 0) {
stage.addActor(new UnsavedResourcesDialog(tabsModule, new WindowListener() {
@Override
public void finished () {
showExitDialogIfNeeded(restartAfterExit);
}
@Override
public void canceled () {
exitInProgress = false;
}
}).fadeIn());
} else
showExitDialogIfNeeded(restartAfterExit);
}
private void showExitDialogIfNeeded (boolean restartAfterExit) {
//the "Do you want to restart" dialog was already displayed and user accepted so no need to display exit dialog even if it is enabled
if (restartAfterExit) {
exit(true);
return;
}
if (settings.isConfirmExit()) {
OptionDialog dialog = Dialogs.showOptionDialog(stage, "Confirm Exit", "Are you sure you want to exit VisEditor?", OptionDialogType.YES_CANCEL, new OptionDialogAdapter() {
@Override
public void yes () {
exit(false);
}
@Override
public void cancel () {
exitInProgress = false;
}
});
dialog.setYesButtonText("Exit");
} else
exit(false);
}
private void exit (boolean restartAfterExit) {
if (restartAfterExit) App.startNewInstance();
Gdx.app.exit();
}
public LaunchConfiguration getLaunchConfig () {
return launchConfig;
}
public Stage getStage () {
return stage;
}
public void requestProjectUnload () {
if (tabsModule.getDirtyTabCount() > 0)
stage.addActor(new UnsavedResourcesDialog(tabsModule, () -> doProjectUnloading()).fadeIn());
else
doProjectUnloading();
}
/** Actually unloads the project: removes its settings pages, disposes project modules and notifies listeners. */
private void doProjectUnloading () {
	projectLoaded = false;
	settingsDialog.removeAll(projectMC.getModules());
	projectMC.dispose();
	statusBar.setText("Project unloaded");
	// NOTE(review): projectMC.getProject() is queried after dispose() — presumably the project
	// reference survives disposal; confirm projectMC keeps it valid at this point.
	App.eventBus.post(new ProjectStatusEvent(Status.Unloaded, projectMC.getProject()));
}
/** Opens a file chooser letting the user pick a project file or directory to load. */
public void loadProjectDialog () {
	fileChooser.pickFileOrDirectory(new SingleFileChooserListener() {
		@Override
		public void selected (FileHandle file) {
			// Delegates actual loading (including error reporting) to the ProjectIOModule.
			editorMC.get(ProjectIOModule.class).loadHandleError(stage, file);
		}
	});
}
/** Shows the new-project creation dialog. */
public void newProjectDialog () {
	stage.addActor(new NewProjectDialog(fileChooser, projectIO).fadeIn());
}
/**
 * Loads the given project and creates its modules, showing a progress dialog meanwhile.
 * If another project is already loaded the user is first asked to unload it and switch.
 * Loading runs on a background thread; module creation and initialization are dispatched
 * back to the OpenGL thread via {@code executeOnOpenGL}.
 */
public void projectLoaded (final Project project) {
	if (projectLoaded) {
		// Only one project can be open at a time — offer to switch to the new one.
		Dialogs.showOptionDialog(stage, "Warning", "Other project is already loaded, unload it and continue?", OptionDialogType.YES_CANCEL, new OptionDialogAdapter() {
			@Override
			public void yes () {
				switchProject(project);
			}
		});
		return;
	}

	// Shared flag used by the loader thread to wait until OpenGL-thread initialization finishes.
	ProjectLoadingDialogController controller = new ProjectLoadingDialogController();

	AsyncTaskProgressDialog dialog = new AsyncTaskProgressDialog("Loading Project", new AsyncTask("ProjectLoaderThread") {
		@Override
		public void execute () {
			setProgressPercent(10);
			setMessage("Loading project data...");

			// Module creation must happen on the OpenGL thread.
			executeOnOpenGL(() -> {
				projectLoaded = true;
				projectMC.setProject(project);
				VisContainers.createProjectModules(projectMC, extensionStorage);
			});

			setMessage("Initializing...");
			setProgressPercent(50);
			ThreadUtils.sleep(10);

			executeOnOpenGL(() -> {
				projectMC.init();
				settingsDialog.addAll(projectMC.getModules());
				statusBar.setText("Project loaded");
				App.eventBus.post(new ProjectStatusEvent(Status.Loaded, project));
				// Signals the loader thread (busy-waiting below) that initialization is done.
				controller.loading = false;
			});

			// Keep the progress dialog open until the OpenGL thread flips the flag.
			while (controller.loading) {
				ThreadUtils.sleep(10);
			}
		}
	});
	dialog.setVisible(true);
	stage.addActor(dialog);
}
/** Unloads the current project and schedules loading of the given one on the next render frame. */
private void switchProject (final Project project) {
	requestProjectUnload();
	// Posted to the next frame so unloading completes before the new project is loaded.
	// NOTE(review): if the unsaved-resources dialog is shown, unloading is asynchronous and may
	// not have finished by the time this runnable executes — confirm this ordering is acceptable.
	Gdx.app.postRunnable(() -> projectLoaded(project));
}
/** Shows the editor settings dialog. */
public void showSettingsWindow () {
	stage.addActor(settingsDialog.fadeIn());
}
/**
 * Called when the active main-content tab changes. Updates the window title, swaps the tab's
 * content into the content table (or shows the placeholder view when no tab is open but a
 * project is loaded) and rebuilds the root layout.
 */
private void mainContentTabChanged (MainContentTab tab) {
	this.tab = tab;

	String title = (tab == null) ? "VisEditor" : "VisEditor - " + tab.getTabTitle();
	frame.setTitle(title);

	tabContentTable.clear();
	if (tab != null) {
		tabContentTable.add(tab.getContentTable()).expand().fill();
	} else if (projectLoaded) {
		// Project is open but no files are: show the "no files open" placeholder.
		tabContentTable.add(noProjectFilesOpenView).center();
	}

	updateRootView();
}
/** Called when the quick-access tab changes; swaps its content in and rebuilds the root layout. */
private void quickAccessViewChanged (Tab tab) {
	quickAccessTab = tab;
	quickAccessContentTable.clear();
	if (tab != null) {
		quickAccessContentTable.add(tab.getContentTable()).expand().fill();
	}
	updateRootView();
}
/**
 * Rebuilds the main layout. The tab content fills the whole area when the active tab requests
 * full width (TAB_ONLY) or there is no quick-access panel; otherwise the view is split between
 * the tab content and the quick-access panel.
 */
private void updateRootView () {
	mainContentTable.clear();
	splitPane.setWidgets(null, null);

	// Explicit grouping of the original `a && b || c` condition (&& binds tighter than ||).
	boolean tabWantsFullArea = (tab != null && tab.getViewMode() == TabViewMode.TAB_ONLY);
	if (tabWantsFullArea || quickAccessTab == null) {
		mainContentTable.add(tabContentTable).expand().fill();
	} else {
		splitPane.setWidgets(tabContentTable, quickAccessContentTable);
		mainContentTable.add(splitPane).expand().fill();
	}
}
/**
 * Cross-thread completion flag for project loading: written by the OpenGL thread when
 * initialization finishes and busy-wait-read by the "ProjectLoaderThread" in
 * {@link #projectLoaded(Project)}.
 */
private class ProjectLoadingDialogController {
	// volatile is required for visibility: without it the Java Memory Model does not guarantee
	// the loader thread ever observes the OpenGL thread's write, which could spin forever.
	public volatile boolean loading = true;
}
}
|
Fix tabs table height
|
Editor/src/com/kotcrab/vis/editor/Editor.java
|
Fix tabs table height
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.